forked from colonelpanic/dotfiles
Remove resources/python. All that stuff belongs in dotfiles/lib/python
@@ -1,73 +0,0 @@
import random
import math


class BirthdayProblem(object):

    def __init__(self):
        pass


def birthday_problem(problem_size=365):
    birthdays = set()
    while True:
        new_birthday = random.randint(1, problem_size)
        if new_birthday in birthdays:
            return len(birthdays) + 1
        birthdays.add(new_birthday)


def theoretical_average(problem_size):
    probabilities = []
    contributions = []
    for n in range(1, problem_size):
        probability = (float(n-1) / problem_size) * falling_factorial_over_exponentiation(problem_size, n-1)
        contribution = n * probability
        probabilities.append(probability)
        contributions.append(contribution)
    return sum(contributions)


def falling_factorial(n, k):
    product = 1
    while k > 0:
        product *= n
        n -= 1
        k -= 1
    return product


def falling_factorial_over_exponentiation(n, k):
    orig = n
    product = float(1)
    while k > 0:
        product *= n
        product = product/orig
        n -= 1
        k -= 1
    return product


def run_birthday_problem_n_times(times_to_run, problem_size=365):
    return [birthday_problem(problem_size) for i in range(int(times_to_run))]


def number_of_people_to_times_occured(runs):
    number_of_people_to_times_occured = {}
    for run in runs:
        number_of_people_to_times_occured[run] = number_of_people_to_times_occured.get(run, 0) + 1


if __name__ == '__main__':
    times_to_run = 131072
    while times_to_run <= 131072:
        for problem_size in range(4000, 5000, 100):
            average = sum(run_birthday_problem_n_times(times_to_run, problem_size=problem_size))/float(times_to_run)
            print "problem size {3} ran {0} times, average was {1}, theoretical average is {2}".format(
                times_to_run,
                average,
                theoretical_average(problem_size),
                problem_size
            )
            print math.fabs(average - theoretical_average(problem_size))
        times_to_run *= 2
@@ -1,231 +0,0 @@
import bisect


class BTreePrinter(object):

    number_width = 4

    subtree_space = object()

    def __init__(self, btree):
        self.btree = btree

    def determine_width_of_node(self, node):
        if node is None: return 0
        return sum(map(self.determine_width_of_node, node.nodes)) + node.number_of_value_nodes * self.number_width

    def determine_width_of_value_node(self, value_node):
        return self.determine_width_of_node(value_node.node) if value_node.node is not None else self.number_width

    def print_tree(self):
        return self.print_levels_recursively([self.btree.head])

    def print_levels_recursively(self, level):
        if all(map(lambda x: x is self.subtree_space, level)): return
        self.print_nodes_at_level(level)
        print ''
        self.print_levels_recursively(self.get_next_level(level))

    def get_next_level(self, level):
        new_level = []
        for item in level:
            if item is self.subtree_space:
                new_level.append(item)
            elif item is not None:
                new_level.extend(item.nodes)
                new_level.append(self.subtree_space)
        return new_level

    def print_nodes_at_level(self, level):
        for item in level:
            if item is self.subtree_space:
                print ' ' * self.number_width,
            else:
                self.print_values_for_node(item)

    def print_values_for_node(self, node):
        if node is None: return
        for value_node in node.value_nodes:
            print ' ' * self.determine_width_of_node(value_node.node),
            print '{num: ^{width}}'.format(num=value_node.value, width=self.number_width),
        print (' ' * (self.determine_width_of_node(node.rightmost_node))),


class IntegrityChecker(object):

    def __init__(self, btree):
        self.btree = btree

    def check_integrity(self):
        return self.check_for_items_smaller_in_right_subtree(self.btree.head) and self.check_for_unmatched_parents(self.btree.head)

    def check_for_unmatched_parents(self, subtree):
        if subtree is None:
            return True

        for node in subtree.nodes:
            if node is None:
                continue
            if node.parent is not subtree:
                return False
            if not self.check_for_unmatched_parents(node):
                return False
        return True

    def check_for_items_smaller_in_right_subtree(self, subtree):
        if subtree is None:
            return True

        small_value = subtree.value_nodes[0].value
        for value_node in subtree.value_nodes[1:]:
            if not self.check_subtree_has_no_items_smaller_than(value_node.node, small_value):
                return False

        if not self.check_for_items_smaller_in_right_subtree(subtree.value_nodes[0].node):
            return False

        return self.check_subtree_has_no_items_smaller_than(subtree.rightmost_node, small_value)

    def check_subtree_has_no_items_smaller_than(self, subtree, value):
        if subtree is None:
            return True
        for value_node in subtree.value_nodes:
            if value > value_node.value:
                return False
            if not self.check_subtree_has_no_items_smaller_than(value_node.node, value):
                return False
        return self.check_subtree_has_no_items_smaller_than(subtree.rightmost_node, value)


class BTree(object):

    @classmethod
    def build_with_value(cls, value):
        btree = cls()
        btree.head = Node(btree, [ValueNode(value)])
        return btree

    def __init__(self):
        self.head = None
        self.inserted_items = []

    def build_new_head(self, value_node):
        new_rightmost_node = self.head
        self.head = Node(self, [value_node])
        value_node.node.parent = self.head
        self.head.rightmost_node = new_rightmost_node
        new_rightmost_node.parent = self.head
        assert self.head.rightmost_node is not None
        return value_node

    def insert(self, value):
        self.head.insert(value)
        self.inserted_items.append(value)
        self.head.check_integrity()
        if not IntegrityChecker(self).check_integrity():
            import ipdb; ipdb.set_trace()

    promote_value_node = build_new_head

    def __repr__(self):
        return "BTree({0})".format(repr(self.head))


class ValueNode(object):

    def __init__(self, value, node=None):
        self.value = value
        self.node = node

    def __lt__(self, other):
        return self.value < other.value

    def __gt__(self, other):
        return self.value > other.value

    def __repr__(self):
        return "ValueNode({0}, {1})".format(repr(self.node), repr(self.value))


class Node(object):

    max_num_values = 3

    def __init__(self, parent, value_nodes=None, rightmost_node=None):
        self.parent = parent
        self.value_nodes = value_nodes or []
        self.rightmost_node = rightmost_node
        self.claim_child_nodes()

    def check_integrity(self):
        if self.is_leaf_node: return True
        if self.rightmost_node:
            return all(child_node.check_integrity() for child_node in self.nodes if child_node is not None)
        import ipdb; ipdb.set_trace()
        return False

    def claim_child_nodes(self):
        for node in self.nodes:
            if node:
                node.parent = self

    @property
    def is_leaf_node(self):
        return not any(self.nodes)

    @property
    def number_of_value_nodes(self):
        return len(self.value_nodes)

    @property
    def nodes(self):
        return [value_node.node for value_node in self.value_nodes] + [self.rightmost_node]

    @property
    def values(self):
        return [value_node.value for value_node in self.value_nodes]

    def __getitem__(self, item):
        return self.nodes[item]

    def promote_value_node(self, value_node):
        bisect.insort(self.value_nodes, value_node)
        if value_node.node:
            value_node.node.parent = self
        self.maybe_rebalance()

    def maybe_rebalance(self):
        if self.number_of_value_nodes < self.max_num_values:
            return
        value_node_to_promote = self.value_nodes[self.number_of_value_nodes/2]
        promoted_nodes_old_node = value_node_to_promote.node
        value_node_to_promote.node = Node(
            self.parent,
            value_nodes=self.value_nodes[:self.number_of_value_nodes/2],
            rightmost_node=promoted_nodes_old_node
        )
        self.value_nodes = self.value_nodes[self.number_of_value_nodes/2+1:]
        self.parent.promote_value_node(value_node_to_promote)
        self.check_integrity()

    def insert(self, value):
        if self.is_leaf_node:
            value_node = ValueNode(value)
            bisect.insort(self.value_nodes, value_node)
            self.maybe_rebalance()
            return value_node

        return self.pick_node(value).insert(value)

    def pick_node(self, value):
        if self.rightmost_node is None:
            import ipdb; ipdb.set_trace()
        for value_node in self.value_nodes:
            if value < value_node.value:
                return value_node.node

        return self.rightmost_node

    def __repr__(self):
        return "Node({0}, {1})".format(", ".join(map(repr, self.value_nodes)), self.rightmost_node)
@@ -1,20 +0,0 @@
# Yields the running (prefix) products of the incoming sequence.
def left_partials(incoming):
    product = 1
    for i in incoming:
        product *= i
        yield product


# For each position, computes the product of every other element
# ("product of array except self"), using prefix and suffix products.
def but_one(incoming):
    lpartials = list(left_partials(incoming))
    rproduct = 1
    result = [None]*len(incoming)
    for i in range(len(incoming)):
        back_index = len(incoming) - i - 1
        if back_index > 0:
            result[back_index] = rproduct * lpartials[back_index-1]
            if back_index < len(incoming):
                rproduct *= incoming[back_index]
        else:
            result[back_index] = rproduct
    return result
@@ -1,20 +0,0 @@
import operator


def generate_decreasing_n_sequence_with_bounded_sum(sequence_length, sum_bound, value_bound=float('inf')):
    if sequence_length == 0:
        yield []
        return
    min_remaining = sequence_length*(sequence_length - 1)/2
    bound_for_current = min(sum_bound - min_remaining, value_bound)

    for value in range(sequence_length, bound_for_current):
        for sequence in generate_decreasing_n_sequence_with_bounded_sum(sequence_length - 1, sum_bound - value, value_bound=value):
            yield [value] + sequence


def build_products_to_sequences_map():
    product_to_sequences_map = {}
    for sequence in generate_decreasing_n_sequence_with_bounded_sum(4, 18):
        product = reduce(operator.mul, sequence, 1)
        product_to_sequences_map.setdefault(product, []).append(sequence)
    return product_to_sequences_map
@@ -1,57 +0,0 @@
import collections
import functools

from partialable import n_partialable


def _compose2(f, g):
    return lambda *args, **kwargs: f(g(*args, **kwargs))


@n_partialable(evaluation_checker=n_partialable.count_evaluation_checker(2))
def compose_with_joiner(joiner, *functions):
    return reduce(joiner, functions)


compose_one_arg = compose_with_joiner(_compose2)


compose = compose_with_joiner(lambda f, g: _compose2(make_single_arity(f),
                                                     force_args_return(g)))


def make_single_arity(function):
    @functools.wraps(function)
    def wrapped(args):
        return function(*args)
    return wrapped


def kwargs_make_single_arity(function):
    @functools.wraps(function)
    def wrapped(kwargs):
        return function(**kwargs)
    return wrapped


def args_kwargs_make_single_arity(function):
    @functools.wraps(function)
    def wrapped((args, kwargs)):
        return function(*args, **kwargs)
    return wrapped


def force_args_return(function):
    @functools.wraps(function)
    def wrapped(*args, **kwargs):
        value = function(*args, **kwargs)
        if not isinstance(value, collections.Iterable):
            value = (value,)
        return value
    return wrapped


def tee(*functions):
    def wrapped(*args, **kwargs):
        return tuple(function(*args, **kwargs) for function in functions)
    return wrapped
@@ -1,12 +0,0 @@
import compose


def test_compose_handles_multiple_argument_output_and_non_iterable_output():
    assert compose.compose(lambda x: x*2,
                           lambda x, y: x + y,
                           lambda x, y, z: (2*(x - y), z))(1, 2, 3) == 2


def test_tee():
    assert compose.compose(lambda x, y: x + y,
                           compose.tee(lambda x: x + 1, lambda x: x - 1))(2) == 4
@@ -1,51 +0,0 @@
import inspect


class n_partialable(object):

    @staticmethod
    def arity_evaluation_checker(function):
        is_class = inspect.isclass(function)
        if is_class:
            function = function.__init__
        function_info = inspect.getargspec(function)
        function_args = function_info.args
        if is_class:
            # This is to handle the fact that self will get passed in automatically.
            function_args = function_args[1:]

        def evaluation_checker(*args, **kwargs):
            acceptable_kwargs = function_args[len(args):]
            kwarg_keys = set(kwargs.keys())
            # Make sure that we didn't get an argument we can't handle.
            assert kwarg_keys.issubset(acceptable_kwargs)
            needed_args = function_args[len(args):]
            if function_info.defaults:
                needed_args = needed_args[:-len(function_info.defaults)]
            return not needed_args or kwarg_keys.issuperset(needed_args)
        return evaluation_checker

    def __init__(self, function, evaluation_checker=None, args=None, kwargs=None):
        self.function = function
        self.evaluation_checker = (evaluation_checker or
                                   self.arity_evaluation_checker(function))
        self.args = args or ()
        self.kwargs = kwargs or {}

    def __call__(self, *args, **kwargs):
        new_args = self.args + args
        new_kwargs = self.kwargs.copy()
        new_kwargs.update(kwargs)
        if self.evaluation_checker(*new_args, **new_kwargs):
            return self.function(*new_args, **new_kwargs)
        else:
            return type(self)(self.function, self.evaluation_checker,
                              new_args, new_kwargs)

    def __get__(self, obj, obj_type):
        bound = type(self)(self.function, self.evaluation_checker,
                           args=self.args + (obj,), kwargs=self.kwargs)
        setattr(obj, self.function.__name__, bound)
        return bound


n_partialable = n_partialable(n_partialable)
@@ -1,27 +0,0 @@
import datetime

from invoke import task, run


@task
def histogram(ignore=''):
    result = run('git rev-list --all')
    date_to_adds = {}
    date_to_deletes = {}
    for sha in result.stdout.split('\n'):
        result = run('git diff-excluding {1} {0}~1 {0} --numstat'.format(sha, ignore), hide=True)
        added, deleted = get_total(result.stdout)
        iso8601 = run('git log {0} --pretty=format:%ai -1'.format(sha), hide=True).stdout.strip()
        commit_date = datetime.datetime.strptime(iso8601, "%Y-%m-%dT%H:%M:%S %z").date()
        date_to_adds[commit_date] = date_to_adds.get(commit_date) + added
        date_to_deletes[commit_date] = date_to_deletes.get(commit_date) + deleted
    print date_to_adds
    print date_to_deletes


def get_total(output):
    try:
        return sum(int(line.split()[0]) for line in output.split('\n')), sum(int(line.split()[1]) for line in output.split('\n'))
    except:
        import ipdb; ipdb.set_trace()
@@ -1,27 +0,0 @@
#!/usr/bin/env python
from tox._config import parseconfig
from yaml import dump


class TravisFromTox(object):

    def __init__(self, tox_config):
        self._tox_config = tox_config

    def build_travis_dict(self):
        return {
            'language': 'python',
            'install': ['pip install "tox>=1.8.0"'],
            'script': 'tox',
            'env': self._get_environment_variables()
        }

    def _get_environment_variables(self):
        return ['TOXENV={0}'.format(env) for env in self._tox_config.envlist]

    def build_travis_yaml(self):
        return dump(self.build_travis_dict(), default_flow_style=False)


if __name__ == '__main__':
    print TravisFromTox(parseconfig()).build_travis_yaml()
@@ -1,118 +0,0 @@
class XPathBuilder(object):

    def __init__(self, nodes=(), relative=True, direct_child=False):
        self.nodes = tuple(nodes)
        self.relative = relative
        self.direct_child = direct_child

    @property
    def xpath(self):
        return ('.' if self.relative else '') + ''.join(node.xpath
                                                        for node in self.nodes)

    @property
    def _or(self):
        updated_final_node = self.nodes[-1].make_or
        return type(self)(self.nodes[:-1] + (updated_final_node,),
                          relative=self.relative, direct_child=self.direct_child)

    def add_node(self, **kwargs):
        if 'direct_child' not in kwargs:
            kwargs['direct_child'] = self.direct_child
        return type(self)(self.nodes + (XPathNode(**kwargs),),
                          relative=self.relative)

    def __getattr__(self, attr):
        return self.add_node(element=attr)

    def __call__(self, *predicates, **attributes):
        direct_child = attributes.pop('direct_child', None)
        assert len(self.nodes)
        updated_final_node = self.nodes[-1](predicates=predicates,
                                            attributes=attributes,
                                            direct_child=direct_child)
        return type(self)(self.nodes[:-1] + (updated_final_node,),
                          relative=self.relative, direct_child=self.direct_child)

    def attribute_contains(self, attribute, contains_string):
        updated_final_node = self.nodes[-1].add_contains_predicates(((attribute, contains_string),))
        return type(self)(self.nodes[:-1] + (updated_final_node,),
                          relative=self.relative, direct_child=self.direct_child)

    def with_classes(self, *classes):
        updated_final_node = self.nodes[-1].with_classes(classes)
        return type(self)(self.nodes[:-1] + (updated_final_node,),
                          relative=self.relative, direct_child=self.direct_child)

    with_class = with_classes

    def apply_(self, tree):
        return tree.xpath(self.xpath)

    def get_text_(self, tree):
        return self.apply_(tree)[0].text


class XPathNode(object):

    @staticmethod
    def attribute_contains(attribute, contained_string):
        return "contains(concat(' ',normalize-space(@{0}),' '),' {1} ')".format(
            attribute, contained_string
        )

    @staticmethod
    def attribute_equal(attribute, value):
        return "@{0} = '{1}'".format(attribute, value)

    def __init__(self, element='*', attributes=None, predicates=None,
                 direct_child=False, use_or=False):
        self.element = element
        self.predicates = tuple(predicates) if predicates else ()
        if attributes:
            self.predicates += tuple([self.attribute_equal(attribute, value)
                                      for attribute, value in attributes.items()])
        self.direct_child = direct_child
        self.use_or = use_or

    @property
    def make_or(self):
        return self(use_or=True)

    @property
    def separator(self):
        return '/' if self.direct_child else '//'

    @property
    def xpath(self):
        return '{0}{1}{2}'.format(self.separator, self.element,
                                  self.predicate_string)

    @property
    def predicate_joiner(self):
        return ' or ' if self.use_or else ' and '

    @property
    def predicate_string(self):
        if self.predicates:
            predicate = self.predicate_joiner.join(self.predicates)
            return '[ {0} ]'.format(predicate)
        else:
            return ''

    def __call__(self, element=None, predicates=(), attributes=None,
                 direct_child=None, use_or=False):
        direct_child = self.direct_child if direct_child is None else direct_child
        element = self.element if element is None else element
        new_predicates = self.predicates + tuple(predicates)
        return type(self)(element, attributes, new_predicates,
                          direct_child, use_or)

    def add_contains_predicates(self, kv_pairs):
        predicates = [self.attribute_contains(attribute, contains_string)
                      for attribute, contains_string in kv_pairs]
        return self(predicates=predicates)

    def with_classes(self, classes):
        return self.add_contains_predicates(('class', class_string)
                                            for class_string in classes)