repo
stringlengths 1
29
| path
stringlengths 24
332
| code
stringlengths 39
579k
|
---|---|---|
libarchive | libarchive//entry.pyfile:/entry.py:function:format_time/format_time | def format_time(seconds, nanos):
""" return float of seconds.nanos when nanos set, or seconds when not """
if nanos:
return float(seconds) + float(nanos) / 1000000000.0
return int(seconds)
|
menpowidgets-0.3.0 | menpowidgets-0.3.0//menpowidgets/utils.pyfile:/menpowidgets/utils.py:function:lists_are_the_same/lists_are_the_same | def lists_are_the_same(a, b):
"""
Function that checks if two `lists` have the same elements in the same
order.
Returns
-------
_lists_are_the_same : `bool`
``True`` if the lists are the same.
"""
if len(a) == len(b):
for i, j in zip(a, b):
if i != j:
return False
return True
else:
return False
|
cloudsmith_cli | cloudsmith_cli//core/utils.pyfile:/core/utils.py:function:get_query_kwargs/get_query_kwargs | def get_query_kwargs(**kwargs):
"""Construct page and page size kwargs (if present)."""
query_kwargs = {}
query = kwargs.pop('query')
if query:
query_kwargs['query'] = query
return query_kwargs
|
bpy | bpy//ops/mesh.pyfile:/ops/mesh.py:function:extrude_verts_indiv/extrude_verts_indiv | def extrude_verts_indiv(mirror: bool=False):
"""Extrude individual vertices only
:param mirror: Mirror Editing
:type mirror: bool
"""
pass
|
twodlearn | twodlearn//core/array.pyfile:/core/array.py:function:is_square_matrix/is_square_matrix | def is_square_matrix(array):
""" Returns if array is a square matrix """
return array.shape.ndims == 2 and array.shape[0].value == array.shape[1
].value
|
tao1-0.2.5 | tao1-0.2.5//tao1/libs/sites/sites.pyfile:/tao1/libs/sites/sites.py:function:role_users/role_users | def role_users(request, role):
""" Users receive a certain role """
db = request.db
if type(role) == str:
doc = db.doc.find_one({'_id': role}, {'users': 1})
else:
doc = db.doc.find_one({'_id': {'$in': role}}, {'users': 1})
return doc['users'].keys()
|
nbb-0.1.0 | nbb-0.1.0//versioneer.pyfile:/versioneer.py:function:render_pep440_old/render_pep440_old | def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Eexceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces['closest-tag']:
rendered = pieces['closest-tag']
if pieces['distance'] or pieces['dirty']:
rendered += '.post%d' % pieces['distance']
if pieces['dirty']:
rendered += '.dev0'
else:
rendered = '0.post%d' % pieces['distance']
if pieces['dirty']:
rendered += '.dev0'
return rendered
|
csirtg_domainsml_tf-0.0a5 | csirtg_domainsml_tf-0.0a5//versioneer.pyfile:/versioneer.py:function:plus_or_dot/plus_or_dot | def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if '+' in pieces.get('closest-tag', ''):
return '.'
return '+'
|
treelite-0.32.data | treelite-0.32.data//purelib/treelite/frontend.pyfile:/purelib/treelite/frontend.py:function:_isascii/_isascii | def _isascii(string):
"""Tests if a given string is pure ASCII; works for both Python 2 and 3"""
try:
return len(string) == len(string.encode())
except UnicodeDecodeError:
return False
except UnicodeEncodeError:
return False
|
bifacial_radiance-0.3.3.2 | bifacial_radiance-0.3.3.2//bifacial_radiance/load.pyfile:/bifacial_radiance/load.py:function:load_inputvariablesfile/load_inputvariablesfile | def load_inputvariablesfile(intputfile):
"""
Loads inputfile which must be in the bifacial_radiance directory,
and must be a ``.py`` file with all the variables, and organizes the variables
into dictionaries that it returns
Parameters
----------
inputfile : str
String of a ``.py`` file in the bifacial_radiance directory.
Returns
-------
simulationParamsDict : Dictionary
Dictionary containing the parameters for performing the simulation,
including simulation names, and types of sky, fixed or tracked systems:
======================== ======= =============================
variable type Description
======================== ======= =============================
testfolder str Path to testfolder
weatherfile str File (with path) to weatherfile
getEPW bool
simulationname str Name for simulation
moduletype str Module name as is / or will be defined in JSON
rewritemodule bool If moduletype exists in JSON, True will rewrite with new parameters
cellLevelmodule bool
axisofrotationtorquetube bool
torqueTube bool
hpc bool
tracking bool
cumulativesky bool
daydateSimulation bool
timeIndexSimulation bool
======================== ======= =============================
sceneParamsDict : Dictionary
gcrorpitch, gcr, pitch, albedo, nMods, nRows,
hub_height, clearance_height, azimuth, hub_height, axis_Azimuth
timeControlParamsDict : Dictionary
hourstart, hourend, daystart, dayend, monthstart, monthend,
timestampstart, timestampend,
moduleParamsDict : Dictionary
numpanels, x, y, bifi, xgap, ygap, zgap
cellLevelModuleParamsDict : Dictionary
numcellsx, numcellsy, xcell, ycell, xcellgap, ycellgap
trackingParamsDict : Dictionary
backtrack, limit_angle,angle_delta
torquetubeParamsDict : Dictionary
diameter, tubetype, torqueTubeMaterial
analysisParamsDict : Dictionary
sensorsy, modWanted, rowWanted
"""
import inputfile as ibf
simulationParamsDict = {'testfolder': ibf.testfolder, 'epwfile': ibf.
epwfile, 'simulationname': ibf.simulationname, 'moduletype': ibf.
moduletype, 'rewriteModule': ibf.rewriteModule, 'cellLevelModule':
ibf.cellLevelModule, 'axisofrotationTorqueTube': ibf.
axisofrotationTorqueTube, 'torqueTube': ibf.torqueTube}
simulationControlDict = {'fixedortracked': ibf.fixedortracked,
'cumulativeSky': ibf.cumulativeSky, 'timestampSimulation': ibf.
timestampSimulation, 'timeIndexSimulation': ibf.timeIndexSimulation,
'hpc': ibf.hpc, 'daydateSimulation': ibf.dayDateSimulation,
'singleKeySimulation': ibf.singleKeySimulation,
'singleKeyRangeSimulation': ibf.singleKeyRangeSimulation}
timeControlParamsDict = {'timestampstart': ibf.timestampstart,
'timestampend': ibf.timestampend, 'startdate': ibf.startdate,
'enddate': ibf.enddate, 'singlekeystart': ibf.singlekeystart,
'singlekeyend': ibf.singlekeyend, 'day_date': ibf.daydate}
moduleParamsDict = {'numpanels': ibf.numpanels, 'x': ibf.x, 'y': ibf.y,
'bifi': ibf.bifi, 'xgap': ibf.xgap, 'ygap': ibf.ygap, 'zgap': ibf.zgap}
sceneParamsDict = {'gcr': ibf.gcr, 'pitch': ibf.pitch, 'albedo': ibf.
albedo, 'nMods': ibf.nMods, 'nRows': ibf.nRows, 'azimuth': ibf.
azimuth_ang, 'tilt': ibf.tilt, 'clearance_height': ibf.
clearance_height, 'hub_height': ibf.hub_height, 'axis_azimuth': ibf
.axis_azimuth}
trackingParamsDict = {'backtrack': ibf.backtrack, 'limit_angle': ibf.
limit_angle, 'angle_delta': ibf.angle_delta}
torquetubeParamsDict = {'diameter': ibf.diameter, 'tubetype': ibf.
tubetype, 'torqueTubeMaterial': ibf.torqueTubeMaterial}
analysisParamsDict = {'sensorsy': ibf.sensorsy, 'modWanted': ibf.
modWanted, 'rowWanted': ibf.rowWanted}
cellLevelModuleParamsDict = {'numcellsx': ibf.numcellsx, 'numcellsy':
ibf.numcellsy, 'xcell': ibf.xcell, 'ycell': ibf.ycell, 'xcellgap':
ibf.xcellgap, 'ycellgap': ibf.ycellgap}
return (simulationParamsDict, simulationControlDict,
timeControlParamsDict, moduleParamsDict, cellLevelModuleParamsDict,
sceneParamsDict, trackingParamsDict, analysisParamsDict)
|
dropbox | dropbox//sharing.pyclass:ShareFolderErrorBase/bad_path | @classmethod
def bad_path(cls, val):
"""
Create an instance of this class set to the ``bad_path`` tag with value
``val``.
:param SharePathError val:
:rtype: ShareFolderErrorBase
"""
return cls('bad_path', val)
|
onnx_coreml | onnx_coreml//_operators_nd.pyfile:/_operators_nd.py:function:_convert_round/_convert_round | def _convert_round(builder, node, graph, err):
"""
convert to CoreML Round Layer:
https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#L5029
"""
builder.add_round(name=node.name, input_name=node.inputs[0],
output_name=node.outputs[0])
|
fastscore | fastscore//utils/utils.pyfile:/utils/utils.py:function:compare_floats/compare_floats | def compare_floats(float1, float2, f_error=0.01, zero_tolerance=1e-08,
inf_tolerance=1e+80):
"""
Compare two numeric objects according to the following algorithm:
1. If float1 < zero_tolerance and float2 < zero_tolerance, then returns True.
2. If abs(float1) > inf_tolerance and abs(float2) > inf_tolerance, and
sign(float1) = sign(float2), then returns True.
3. If zero_tolerance < abs(float1, float2) < inf_tolerance, and
2*abs(float1 - float2)/(abs(float1) + abs(float2)) <= f_error, return True.
4. Otherwise, return False.
:param float1: First numeric field.
:param float2: Second numeric field.
:param f_error: Fractional margin of error (default: 0.01)
:param zero_tolerance: Zero tolerance (default: 1e-8)
:param inf_tolerance: Infinite tolerance (default: 1e80)
"""
if abs(float1) < zero_tolerance and abs(float2) < zero_tolerance:
return True
elif abs(float1) > inf_tolerance and abs(float2) > inf_tolerance:
if float1 / float2 > 0:
return True
else:
return False
elif 2 * abs(float1 - float2) / (abs(float1) + abs(float2)) <= f_error:
return True
else:
return False
|
sklearn | sklearn//neural_network/_base.pyfile:/neural_network/_base.py:function:inplace_identity_derivative/inplace_identity_derivative | def inplace_identity_derivative(Z, delta):
"""Apply the derivative of the identity function: do nothing.
Parameters
----------
Z : {array-like, sparse matrix}, shape (n_samples, n_features)
The data which was output from the identity activation function during
the forward pass.
delta : {array-like}, shape (n_samples, n_features)
The backpropagated error signal to be modified inplace.
"""
|
djira | djira//ep_registry.pyfile:/ep_registry.py:function:ep_ping/ep_ping | def ep_ping():
"""Check service availability.
"""
return 'pong'
|
hansel-2.0.1 | hansel-2.0.1//hansel/_utils.pyfile:/hansel/_utils.py:function:_is_crumb_arg/_is_crumb_arg | def _is_crumb_arg(crumb_arg: str) ->bool:
""" Return True if `crumb_arg` is a well formed crumb argument, i.e.,
is a string that starts with `start_sym` and ends with `end_sym`.
False otherwise.
"""
if not isinstance(crumb_arg, str):
return False
start_sym, end_sym = '{', '}'
return crumb_arg.startswith(start_sym) and crumb_arg.endswith(end_sym)
|
medImgProc-2.6.19 | medImgProc-2.6.19//medImgProc/image.pyfile:/medImgProc/image.py:function:adjustPhyLengthToFirst/adjustPhyLengthToFirst | def adjustPhyLengthToFirst(ImageA, ImageB):
"""
Adjust physical length (such as fps to meter per pixel etc) to first image
!!!Warning: editing input class!!!
"""
ImageB.dimlen = ImageA.dimlen[:]
return
|
NdbSearchableBase-1.3 | NdbSearchableBase-1.3//NdbSearchableBase/SearchableModel.pyclass:SearchableModel/_pre_delete_hook | @classmethod
def _pre_delete_hook(cls, key):
"""
Removes instance from index.
"""
if cls.searching_enabled:
doc_id = cls.search_get_document_id(key)
index = cls.search_get_index()
index.delete(doc_id)
|
audiodatasets-1.0.0 | audiodatasets-1.0.0//audiodatasets/searchwords.pyfile:/audiodatasets/searchwords.py:function:search/search | def search(term, corpora):
"""Find all sources that have a given term in them"""
term = term.upper()
for corpus in corpora:
for speaker, content, audio_filename, mfcc in corpus.mfcc_utterances():
if term in content.upper():
yield speaker, content, audio_filename, mfcc
|
rfctl | rfctl//dirtools/globster.pyclass:Globster/identify | @staticmethod
def identify(pattern):
"""Returns pattern category.
:param pattern: normalized pattern.
Identify if a pattern is fullpath, basename or extension
and returns the appropriate type.
"""
if pattern.startswith('RE:') or '/' in pattern:
return 'fullpath'
elif pattern.startswith('*.'):
return 'extension'
else:
return 'basename'
|
bpy | bpy//ops/ptcache.pyfile:/ops/ptcache.py:function:remove/remove | def remove():
"""Delete current cache
"""
pass
|
neutron-tempest-plugin-1.1.0 | neutron-tempest-plugin-1.1.0//neutron_tempest_plugin/api/base.pyclass:BaseAdminNetworkTest/create_flavor | @classmethod
def create_flavor(cls, name, description, service_type):
"""Wrapper utility that returns a test flavor."""
body = cls.admin_client.create_flavor(description=description,
service_type=service_type, name=name)
flavor = body['flavor']
cls.flavors.append(flavor)
return flavor
|
synergy | synergy//client/tabulate.pyfile:/client/tabulate.py:function:_build_simple_row/_build_simple_row | def _build_simple_row(padded_cells, rowfmt):
"""Format row according to DataRow format without padding."""
begin, sep, end = rowfmt
return (begin + sep.join(padded_cells) + end).rstrip()
|
subsystem-0.5.4 | subsystem-0.5.4//subsystem/plugins.pyfile:/subsystem/plugins.py:function:subscope/subscope | def subscope(paths, language):
"""
Download subtitles for multiple video files via subscope script.
NOTE: Runs subscope via shell to support Python 2 version of subscope
(and the Python 3 version has issues)
"""
from subsystem.subsystem import multithreader
multithreader(['subscope', '-l', language], paths)
|
fake-bpy-module-2.79-20200428 | fake-bpy-module-2.79-20200428//bpy/ops/mesh.pyfile:/bpy/ops/mesh.py:function:fill/fill | def fill(use_beauty: bool=True):
"""Fill a selected edge loop with faces
:param use_beauty: Beauty, Use best triangulation division
:type use_beauty: bool
"""
pass
|
asciimatics | asciimatics//particles.pyclass:Particle/_default_next_colour | @staticmethod
def _default_next_colour(particle):
"""
Default next colour implementation - linear progression through
each colour tuple.
"""
return particle.colours[(len(particle.colours) - 1) * particle.time //
particle.life_time]
|
AdHoc-0.3.2 | AdHoc-0.3.2//adhoc.pyclass:AdHoc/line_tag_remove | @classmethod
def line_tag_remove(cls, string, symbol_or_re, is_re=False, delimiters=None):
"""Remove tagged lines.
Default tag delimiters are :attr:`line_delimiters`.
>>> tpl = AdHoc.get_named_template("col-param-closure")
.. >>> printf(str(AdHoc.line_tag_remove(tpl, "adhoc_run_time_section")))
"""
transform = lambda blob: ''
return cls.transform_lines(transform, string, symbol_or_re, is_re,
delimiters)
|
winsys | winsys//fs.pyfile:/fs.py:function:walk/walk | def walk(root, depthfirst=False, error_handler=None):
"""Walk the directory tree starting from root, optionally ignoring
access errors.
:param root: anything accepted by :func:`dir`
:param depthfirst: passed to :meth:`Dir.walk`
:param error_handler: passed to :meth:`Dir.walk`
:returns: as :meth:`Dir.walk`
"""
return dir(root).walk(depthfirst=depthfirst, error_handler=error_handler)
|
pyoes-0.11.0 | pyoes-0.11.0//pyoes/utils.pyfile:/pyoes/utils.py:function:set_attr_filter/set_attr_filter | def set_attr_filter(target, key, value):
"""
Jinja2 filter that sets an attribute of an object.
"""
target[key] = value
return target
|
querybuilder | querybuilder//helpers.pyfile:/helpers.py:function:value_for_keypath/value_for_keypath | def value_for_keypath(dict, keypath):
"""
Returns the value of a keypath in a dictionary
if the keypath exists or None if the keypath
does not exist.
"""
if len(keypath) == 0:
return dict
keys = keypath.split('.')
value = dict
for key in keys:
if key in value:
value = value[key]
else:
return None
return value
|
pyrates | pyrates//ir/circuit.pyclass:CircuitIR/from_circuits | @classmethod
def from_circuits(cls, label: str, circuits: dict):
"""Circuit creation method that takes multiple circuits (templates or instances of `CircuitIR`) as inputs to
create one larger circuit out of these. With additional `connectivity` information, these circuit can directly
be interlinked.
Parameters
----------
label
Name of new circuit. Should not collide with any circuit label given in `circuits`.
circuits
Dictionary with unique circuit labels as keys and circuits as items. Circuits may either be instances of
`CircuitTemplate` or `CircuitIR`. Alternatively, a circuit template may also be given via a sub-dictionary
with keys `template` and `values`, where `values` is a dictionary of variable value updates for the given
template.
Returns
-------
circuit
instance of `CircuitIR`
"""
circuit = cls(label, nodes={}, edges=[])
for name, circ in circuits.items():
circuit.add_circuit(name, circ)
return circuit
|
preio-0.2 | preio-0.2//preio/util.pyfile:/preio/util.py:function:no_spaces/no_spaces | def no_spaces(field):
"""Validates that there are no spaces in a string
Args:
field:str
Returns:
returns Boolean
"""
if (' ' in field) == True:
return False
else:
return True
|
project_generator-0.9.17 | project_generator-0.9.17//project_generator/project.pyclass:ProjectTemplate/_get_common_data_template | @staticmethod
def _get_common_data_template():
""" Data for tool specific """
data_template = {'includes': [], 'linker_file': '', 'macros': [],
'sources': []}
return data_template
|
uszipcode-0.2.4 | uszipcode-0.2.4//uszipcode/pkg/fuzzywuzzy/utils.pyfile:/uszipcode/pkg/fuzzywuzzy/utils.py:function:validate_string/validate_string | def validate_string(s):
"""
Check input has length and that length > 0
:param s:
:return: True if len(s) > 0 else False
"""
try:
return len(s) > 0
except TypeError:
return False
|
zenmake-0.9.0 | zenmake-0.9.0//src/zenmake/waf/waflib/Tools/gcc.pyfile:/src/zenmake/waf/waflib/Tools/gcc.py:function:configure/configure | def configure(conf):
"""
Configuration for gcc
"""
conf.find_gcc()
conf.find_ar()
conf.gcc_common_flags()
conf.gcc_modifier_platform()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
conf.check_gcc_o_space()
|
maas | maas//client/bones/helpers.pyfile:/client/bones/helpers.py:function:derive_resource_name/derive_resource_name | def derive_resource_name(name):
"""A stable, human-readable name and identifier for a resource."""
if name.startswith('Anon'):
name = name[4:]
if name.endswith('Handler'):
name = name[:-7]
if name == 'Maas':
name = 'MAAS'
return name
|
SafeHaven-Python-SDK-5.2.5 | SafeHaven-Python-SDK-5.2.5//dgpy/utils.pyfile:/dgpy/utils.py:function:avg/avg | def avg(generator_expression):
"""Return the average of the numbers listed in @generator_expression."""
l = list(generator_expression)
return sum(l) / len(l) if l else 0
|
watson-developer-cloud-2.10.1 | watson-developer-cloud-2.10.1//watson_developer_cloud/compare_comply_v1.pyclass:TableHeaders/_from_dict | @classmethod
def _from_dict(cls, _dict):
"""Initialize a TableHeaders object from a json dictionary."""
args = {}
if 'cell_id' in _dict:
args['cell_id'] = _dict.get('cell_id')
if 'location' in _dict:
args['location'] = _dict.get('location')
if 'text' in _dict:
args['text'] = _dict.get('text')
if 'row_index_begin' in _dict:
args['row_index_begin'] = _dict.get('row_index_begin')
if 'row_index_end' in _dict:
args['row_index_end'] = _dict.get('row_index_end')
if 'column_index_begin' in _dict:
args['column_index_begin'] = _dict.get('column_index_begin')
if 'column_index_end' in _dict:
args['column_index_end'] = _dict.get('column_index_end')
return cls(**args)
|
GSAS-II-WONDER_win-1.0.1 | GSAS-II-WONDER_win-1.0.1//GSAS-II-WONDER/GSASIImath.pyfile:/GSAS-II-WONDER/GSASIImath.py:function:FindAtomIndexByIDs/FindAtomIndexByIDs | def FindAtomIndexByIDs(atomData, loc, IDs, Draw=True):
"""finds the set of atom array indices for a list of atom IDs. Will search
either the Atom table or the drawAtom table.
:param list atomData: Atom or drawAtom table containting coordinates, etc.
:param int loc: location of atom id in atomData record
:param list IDs: atom IDs to be found
:param bool Draw: True if drawAtom table to be searched; False if Atom table
is searched
:returns: list indx: atom (or drawAtom) indices
"""
indx = []
for i, atom in enumerate(atomData):
if Draw and atom[loc] in IDs:
indx.append(i)
elif atom[loc] in IDs:
indx.append(i)
return indx
|
pystrix-1.1.5 | pystrix-1.1.5//pystrix/agi/agi_core.pyfile:/pystrix/agi/agi_core.py:function:quote/quote | def quote(value):
"""
Encapsulates `value` in double-quotes and coerces it into a string, if
necessary.
"""
return '"%(value)s"' % {'value': str(value)}
|
neutron-15.0.2 | neutron-15.0.2//neutron/extensions/subnetallocation.pyclass:Subnetallocation/get_resources | @classmethod
def get_resources(cls):
"""Returns Ext Resources."""
return []
|
eric6 | eric6//eric6_sqlbrowser.pyfile:/eric6_sqlbrowser.py:function:createMainWidget/createMainWidget | def createMainWidget(argv):
"""
Function to create the main widget.
@param argv list of commandline parameters (list of strings)
@return reference to the main widget (QWidget)
"""
from SqlBrowser.SqlBrowser import SqlBrowser
if len(argv) > 1:
connections = argv[1:]
else:
connections = []
browser = SqlBrowser(connections)
return browser
|
lifelib-0.0.14 | lifelib-0.0.14//lifelib/projects/solvency2/projection.pyfile:/lifelib/projects/solvency2/projection.py:function:SizeExpsOther/SizeExpsOther | def SizeExpsOther(t):
"""Other expenses per policy at time t"""
return 0
|
pyboto3-1.4.4 | pyboto3-1.4.4//pyboto3/ec2.pyfile:/pyboto3/ec2.py:function:cancel_conversion_task/cancel_conversion_task | def cancel_conversion_task(DryRun=None, ConversionTaskId=None,
ReasonMessage=None):
"""
Cancels an active conversion task. The task can be the import of an instance or volume. The action removes all artifacts of the conversion, including a partially uploaded volume or instance. If the conversion is complete or is in the process of transferring the final disk image, the command fails and returns an exception.
For more information, see Importing a Virtual Machine Using the Amazon EC2 CLI .
See also: AWS API Documentation
:example: response = client.cancel_conversion_task(
DryRun=True|False,
ConversionTaskId='string',
ReasonMessage='string'
)
:type DryRun: boolean
:param DryRun: Checks whether you have the required permissions for the action, without actually making the request, and provides an error response. If you have the required permissions, the error response is DryRunOperation . Otherwise, it is UnauthorizedOperation .
:type ConversionTaskId: string
:param ConversionTaskId: [REQUIRED]
The ID of the conversion task.
:type ReasonMessage: string
:param ReasonMessage: The reason for canceling the conversion task.
"""
pass
|
jam.py-5.4.94 | jam.py-5.4.94//jam/third_party/sqlalchemy/sql/annotation.pyfile:/jam/third_party/sqlalchemy/sql/annotation.py:function:_shallow_annotate/_shallow_annotate | def _shallow_annotate(element, annotations):
"""Annotate the given ClauseElement and copy its internals so that
internal objects refer to the new annotated object.
Basically used to apply a "dont traverse" annotation to a
selectable, without digging throughout the whole
structure wasting time.
"""
element = element._annotate(annotations)
element._copy_internals()
return element
|
zeekpkg | zeekpkg//package.pyfile:/package.py:function:is_valid_name/is_valid_name | def is_valid_name(name):
"""Returns True if name is a valid package name, else False."""
if name != name.strip():
return False
if name in ('package', 'packages'):
return False
return True
|
dk | dk//identifiers/persnr.pyfile:/identifiers/persnr.py:function:is_anonymized/is_anonymized | def is_anonymized(pnr):
"""Returns True iff the pnr has been anonymized.
"""
return pnr[0] == '9'
|
astropy | astropy//coordinates/representation.pyclass:BaseDifferential/_get_base_vectors | @classmethod
def _get_base_vectors(cls, base):
"""Get unit vectors and scale factors from base.
Parameters
----------
base : instance of ``self.base_representation``
The points for which the unit vectors and scale factors should be
retrieved.
Returns
-------
unit_vectors : dict of `CartesianRepresentation`
In the directions of the coordinates of base.
scale_factors : dict of `~astropy.units.Quantity`
Scale factors for each of the coordinates
Raises
------
TypeError : if the base is not of the correct type
"""
cls._check_base(base)
return base.unit_vectors(), base.scale_factors()
|
pyboto3-1.4.4 | pyboto3-1.4.4//pyboto3/codedeploy.pyfile:/pyboto3/codedeploy.py:function:create_application/create_application | def create_application(applicationName=None):
"""
Creates an application.
See also: AWS API Documentation
:example: response = client.create_application(
applicationName='string'
)
:type applicationName: string
:param applicationName: [REQUIRED]
The name of the application. This name must be unique with the applicable IAM user or AWS account.
:rtype: dict
:return: {
'applicationId': 'string'
}
"""
pass
|
export_manager | export_manager//_fsutil.pyfile:/_fsutil.py:function:total_file_count/total_file_count | def total_file_count(path):
"""Returns total number of files under specified path.
The path should be a pathlib.Path instance referring to a file
or directory. For a file this will return 1. Directories will be
traversed recursively.
"""
if path.is_file():
return 1
return sum(1 for child in path.glob('**/*') if child.is_file())
|
matflow | matflow//models/construction.pyfile:/models/construction.py:function:get_schema_dict/get_schema_dict | def get_schema_dict(name, method, all_task_schemas, software_instance=None):
"""Get the schema associated with the method/implementation of this task."""
match_task_idx = None
match_method_idx = None
match_imp_idx = None
for task_ref_idx, task_ref in enumerate(all_task_schemas):
if task_ref['name'] == name:
match_task_idx = task_ref_idx
for met_idx, met in enumerate(task_ref['methods']):
if met['name'] == method:
match_method_idx = met_idx
implementations = met.get('implementations')
if implementations:
for imp_idx, imp in enumerate(implementations):
if imp['name'] == software_instance['name']:
match_imp_idx = imp_idx
break
break
break
if match_task_idx is None:
msg = f'No matching task found with name: "{name}"'
raise ValueError(msg)
if match_method_idx is None:
msg = (
f'No matching method found with name: "{method}" in task: "{name}""'
)
raise ValueError(msg)
task_ref = all_task_schemas[match_task_idx]
met_ref = task_ref['methods'][met_idx]
inputs = task_ref.get('inputs', []) + met_ref.get('inputs', [])
outputs = task_ref.get('outputs', []) + met_ref.get('outputs', [])
imp_ref = None
in_map = None
out_map = None
command_opt = None
if match_imp_idx is not None:
imp_ref = met_ref['implementations'][match_imp_idx]
inputs += imp_ref.get('inputs', [])
outputs += imp_ref.get('outputs', [])
in_map = imp_ref.get('input_map', [])
out_map = imp_ref.get('output_map', [])
command_opt = imp_ref.get('commands', [])
outputs = list(set(outputs))
if software_instance:
implementation = software_instance['name']
command_group = {'commands': command_opt, 'env_pre':
software_instance.get('env_pre'), 'env_post': software_instance
.get('env_post')}
else:
implementation = None
command_group = None
schema_dict = {'name': name, 'method': method, 'implementation':
implementation, 'inputs': inputs, 'outputs': outputs, 'input_map':
in_map, 'output_map': out_map, 'command_group': command_group}
return schema_dict
|
phylopypruner-0.9.6 | phylopypruner-0.9.6//phylopypruner/filtering.pyfile:/phylopypruner/filtering.py:function:too_few_otus/too_few_otus | def too_few_otus(tree, threshold):
"""
Return true if the provided tree node object have fewer OTUs than the
provided threshold.
Parameters
----------
tree : TreeNode object
The tree you wish to consider.
threshold : int
Minimum number of OTUs allowed in the provided tree.
Returns
-------
True or False
True if the tree contains to few OTUs.
"""
return len(set(tree.iter_otus())) < threshold
|
smof-2.18.0 | smof-2.18.0//smof/main.pyclass:FileDescription/_has_start | @classmethod
def _has_start(cls, s):
"""
Tests if the first codon is the START codon, assumes uppercase
"""
return s[0:3] == 'ATG'
|
pyCoalesce-0.0.43.dev4 | pyCoalesce-0.0.43.dev4//pyCoalesce/coalesce_request.pyfile:/pyCoalesce/coalesce_request.py:function:create_search_group/create_search_group | def create_search_group(recordset=None, field=None, values=None,
criteria_operator='EqualTo'):
"""
Create a Coalesce search group/filter object (the value of "group" in a
`Coalesce search request object
<https://github.com/InCadence/coalesce/wiki/REST-API#search-query-data-format>`_)
that combines all entered values for a given field, either including or
excluding (depending on the value of "criteria_operator") all records
matching any of the supplied values.
:param recordset: the (:class:`str`) recordset of the field to be
search on
:param field: the (:class:`str`) field to be searched on
:param values: a :class:`list` or list-like of values to search for.
The values themselves should be of simple, JSON-serializable types
(e.g., strings, numbers, Boolean).
:param criteria_operator: "EqualTo", if the search is to *include*
a record matching *any* element of "values", or "NotEqualTo", if
the search is to *exclude all* records matching *any* element of
"values".
:returns: a Coalesce search group as a :class:`dict`, or ``None`` if
"values" is empty
"""
if not isinstance(recordset, str):
raise TypeError('Please supply a recordset as a string.')
if not isinstance(field, str):
raise TypeError('Please supply a field to search on, as a string.')
values_error_msg = ('Please supply a list or list-like of values to ' +
'search for.')
if not values:
raise TypeError(values_error_msg)
elif isinstance(values, str):
raise TypeError(values_error_msg)
else:
try:
values[0]
except:
raise TypeError(values_error_msg)
if criteria_operator == 'EqualTo':
group_operator = 'OR'
elif criteria_operator == 'NotEqualTo':
group_operator = 'AND'
else:
raise ValueError('The value of "criteria_operator" must be either ' +
'"EqualTo" or "NotEqualTo".')
criteria = []
for value in values:
criteria_set = {'recordset': recordset, 'field': field, 'operator':
criteria_operator, 'value': value, 'matchCase': False}
criteria.append(criteria_set)
if len(criteria) > 0:
group = {'operator': group_operator, 'criteria': criteria}
else:
group = None
return group
|
mrjob-0.7.2 | mrjob-0.7.2//mrjob/sim.pyfile:/mrjob/sim.py:function:_group_records_for_split/_group_records_for_split | def _group_records_for_split(record_gen, split_size, reducer_key=None):
"""Helper for _split_records()."""
split_num = 0
bytes_in_split = 0
last_key_value = None
for record in record_gen:
same_key = False
if reducer_key:
key_value = reducer_key(record)
same_key = key_value == last_key_value
last_key_value = key_value
if bytes_in_split >= split_size and not same_key:
split_num += 1
bytes_in_split = 0
yield split_num, record
bytes_in_split += len(record)
|
temp2temp | temp2temp//temperature.pyclass:Romer/to_delisle | @staticmethod
def to_delisle(romer):
""" Convert romer to delisle """
return (60 - romer) * 20 / 7
|
grammpy-2.0.0 | grammpy-2.0.0//grammpy/transforms/Manipulations.pyclass:Manipulations/replaceNode | @staticmethod
def replaceNode(oldNode, newNode):
"""
Replace instance of Nonterminal or Terminal in the tree with another one.
:param oldNode: Old nonterminal or terminal already in the tree.
:param newNode: Instance of nonterminal or terminal to replace with.
:return: Instance `newNode` in the tree.
"""
if oldNode.from_rule is not None and len(oldNode.from_rule.to_symbols) > 0:
indexParent = oldNode.from_rule.to_symbols.index(oldNode)
oldNode.from_rule.to_symbols[indexParent] = newNode
newNode._set_from_rule(oldNode.from_rule)
if oldNode.to_rule is not None and len(oldNode.to_rule.from_symbols) > 0:
indexChild = oldNode.to_rule.from_symbols.index(oldNode)
oldNode.to_rule._from_symbols[indexChild] = newNode
newNode._set_to_rule(oldNode.to_rule)
return newNode
|
mercurial-5.4 | mercurial-5.4//mercurial/interfaces/repository.pyclass:ifileindex/iscensored | def iscensored(rev):
"""Return whether a revision's content has been censored."""
|
chatette | chatette//utils.pyclass:Singleton/reset_instance | @classmethod
def reset_instance(cls, *args, **kwargs):
    """
    Completely resets the singleton instance of the class:
    drops the current instance, constructs a fresh one from the
    given arguments, stores it and returns it.
    """
    # Clear first so a constructor that consults `_instance`
    # never sees the stale singleton while the new one is built.
    cls._instance = None
    fresh = cls(*args, **kwargs)
    cls._instance = fresh
    return fresh
|
pymavlink-ws-xrendan-2.4.4 | pymavlink-ws-xrendan-2.4.4//generator/mavparse.pyfile:/generator/mavparse.py:function:total_msgs/total_msgs | def total_msgs(xml):
    """Return the total number of messages across all parsed XML documents.

    :param xml: iterable of parsed MAVLink XML documents, each exposing a
        ``message`` sequence
    """
    count = 0
    for x in xml:
        count += len(x.message)
    return count
|
pauvre | pauvre//gfftools.pyfile:/gfftools.py:function:x_offset_gff/x_offset_gff | def x_offset_gff(GFFParseobj, x_offset):
    """Takes in a gff object (a gff file parsed as a pandas dataframe),
    and an x_offset value and shifts the start, stop, center, lmost, and rmost.
    Returns a GFFParse object with the shifted values in GFFParse.features.
    """
    # Shift every positional column by the same offset, in place.
    for columnname in ['start', 'stop', 'center', 'lmost', 'rmost']:
        GFFParseobj.features[columnname] = GFFParseobj.features[columnname
            ] + x_offset
    return GFFParseobj
|
EinSumConv-0.0.2 | EinSumConv-0.0.2//EinSumConv/eps.pyfile:/EinSumConv/eps.py:function:oddEven/oddEven | def oddEven(num):
    """Return the parity sign of ``num``: 1 when even, -1 when odd."""
    if num % 2:
        return -1
    return 1
|
pyramid_weblayer-0.14.7 | pyramid_weblayer-0.14.7//src/pyramid_weblayer/seen.pyfile:/src/pyramid_weblayer/seen.py:function:get_has_been_seen/get_has_been_seen | def get_has_been_seen(request):
    """Return ``True`` if the request has the ``has_been_seen_before`` cookie.
    >>> from mock import Mock
    >>> mock_request = Mock()
    False if the request doesn't have the cookie::
    >>> mock_request.cookies = {}
    >>> get_has_been_seen(mock_request)
    False
    True if the request does::
    >>> mock_request.cookies = {'has_been_seen_before': 'true'}
    >>> get_has_been_seen(mock_request)
    True
    """
    # Any non-empty cookie value counts as "seen before".
    return bool(request.cookies.get('has_been_seen_before', False))
|
flywheel | flywheel//models/permission.pyclass:Permission/positional_to_model | @staticmethod
def positional_to_model(value):
    """Convert a positional argument to its model value (identity mapping)."""
    model_value = value
    return model_value
|
designate | designate//objects/validation_error.pyclass:ValidationError/from_js_error | @classmethod
def from_js_error(cls, js_error):
    """Convert a JSON Schema ValidationError instance into a
    ValidationError instance.
    """
    err = cls()
    # NOTE(review): 'releative_path' looks like a typo for 'relative_path';
    # preserved so the fallback to ``js_error.path`` is unchanged.
    err.path = list(getattr(js_error, 'releative_path', js_error.path))
    err.message = js_error.message
    err.validator = js_error.validator
    err.validator_value = js_error.validator_value
    err.raw = js_error._contents()
    return err
|
pytzer | pytzer//parameters.pyfile:/parameters.py:function:psi_Mnjj_Na_Cl_PK74/psi_Mnjj_Na_Cl_PK74 | def psi_Mnjj_Na_Cl_PK74(T, P):
    """c-c'-a: manganese(II) sodium chloride [PK74]."""
    psi = -0.003
    # Constant fit: flagged valid only exactly at 298.15 K; P is ignored.
    valid = T == 298.15
    return psi, valid
|
multiplex | multiplex//util.pyfile:/util.py:function:overlapping_bb/overlapping_bb | def overlapping_bb(bb1, bb2):
    """
    Check whether the two given bounding boxes overlap.
    :param bb1: The first bounding box.
    :type bb1: :class:`matplotlib.transforms.Bbox`
    :param bb2: The second bounding box.
    :type bb2: :class:`matplotlib.transforms.Bbox`
    :return: A boolean indicating whether the two bounding boxes overlap.
    :rtype: bool
    """
    # Overlap requires an edge of one box to fall strictly inside the other
    # in both the x- and y-extent; both orderings of the boxes are checked.
    return (bb2.x0 < bb1.x0 < bb2.x1 or bb2.x0 < bb1.x1 < bb2.x1) and (bb2.
        y0 < bb1.y0 < bb2.y1 or bb2.y0 < bb1.y1 < bb2.y1) or (bb1.x0 < bb2.
        x0 < bb1.x1 or bb1.x0 < bb2.x1 < bb1.x1) and (bb1.y0 < bb2.y0 < bb1
        .y1 or bb1.y0 < bb2.y1 < bb1.y1)
|
fake-bpy-module-2.80-20200428 | fake-bpy-module-2.80-20200428//bpy/ops/sequencer.pyfile:/bpy/ops/sequencer.py:function:copy/copy | def copy():
    """Undocumented contribute <https://developer.blender.org/T51061>
    """
    # Auto-generated stub for the Blender operator; the implementation lives in C.
    pass
|
txWS-Upgrade-0.10.0 | txWS-Upgrade-0.10.0//txws.pyfile:/txws.py:function:is_hybi00/is_hybi00 | def is_hybi00(headers):
    """
    Determine whether a given set of headers is HyBi-00-compliant.
    Hixie-76 and HyBi-00 use a pair of keys in the headers to handshake with
    servers.
    """
    # Both challenge keys must be present for a HyBi-00/Hixie-76 handshake.
    return 'Sec-WebSocket-Key1' in headers and 'Sec-WebSocket-Key2' in headers
|
gbp-0.9.19 | gbp-0.9.19//gbp/patch_series.pyclass:PatchSeries/_split_strip | @classmethod
def _split_strip(cls, line):
"""
Separate the -p<num> option from the patch name
>>> PatchSeries._split_strip("asdf -p1")
('asdf', 1)
>>> PatchSeries._split_strip("a/nice/patch")
('a/nice/patch', None)
>>> PatchSeries._split_strip("asdf foo")
('asdf foo', None)
"""
patch = line
strip = None
split = line.rsplit(None, 1)
if len(split) > 1:
m = cls.level_re.match(split[1])
if m:
patch = split[0]
strip = int(m.group('level'))
return patch, strip
|
bpy | bpy//ops/image.pyfile:/ops/image.py:function:project_edit/project_edit | def project_edit():
    """Edit a snapshot of the view-port in an external image editor
    """
    # Auto-generated stub for the Blender operator; the implementation lives in C.
    pass
|
fake-bpy-module-2.79-20200428 | fake-bpy-module-2.79-20200428//bpy/ops/gpencil.pyfile:/bpy/ops/gpencil.py:function:palettecolor_isolate/palettecolor_isolate | def palettecolor_isolate(affect_visibility: bool=False):
    """Toggle whether the active color is the only one that is editable and/or visible
    :param affect_visibility: Affect Visibility, In addition to toggling the editability, also affect the visibility
    :type affect_visibility: bool
    """
    # Auto-generated stub for the Blender operator; no Python-side behaviour.
    pass
|
ffmpeg_streaming | ffmpeg_streaming//_command_builder.pyfile:/_command_builder.py:function:_hls_seg_ext/_hls_seg_ext | def _hls_seg_ext(hls):
    """Return the HLS segment file extension: ``'m4s'`` when the
    ``hls_segment_type`` option is ``'fmp4'``, ``'ts'`` otherwise.
    """
    return 'm4s' if hls.options.get('hls_segment_type', '') == 'fmp4' else 'ts'
|
cloudomate | cloudomate//gateway/coinbase.pyclass:Coinbase/_extract_address | @staticmethod
def _extract_address(bitcoin_url):
"""
Extract address from bitcoin url
:param bitcoin_url: bitcoin url
:return: Bitcoin address
"""
address_text, _ = bitcoin_url.split('?')
address = address_text.split(':')[1]
return address
|
plot_utils | plot_utils//helper.pyfile:/helper.py:function:_calc_bar_width/_calc_bar_width | def _calc_bar_width(width):
    """
    Calculate width (in points) of bar plot from figure width (in inches).
    """
    # Piecewise scale factor: narrower figures get proportionally wider bars.
    if width <= 7:
        bar_width = width * 3.35
    elif width <= 9:
        bar_width = width * 2.6
    elif width <= 10:
        bar_width = width * 2.1
    else:
        bar_width = width * 1.2
    return bar_width
|
delphixpy | delphixpy//v1_10_3/common.pyfile:/v1_10_3/common.py:function:validate_format/validate_format | def validate_format(*_arg):
    """
    This method can be overridden with format validation logic.
    The default implementation accepts every value.
    """
    return True
|
doubledate | doubledate//utils.pyfile:/utils.py:function:soy/soy | def soy(date, offset=0):
    """
    Returns the start of the year at a given offset
    (January 1st of ``date``'s year plus ``offset`` years).
    """
    # type(date)(...) preserves the caller's class (date, datetime, subclass).
    return type(date)(date.year + offset, 1, 1)
|
gotran-2020.2.0 | gotran-2020.2.0//gotran/codegeneration/avoidsympycontractions.pyfile:/gotran/codegeneration/avoidsympycontractions.py:function:enable_evaluation/enable_evaluation | def enable_evaluation():
    """
    Enable Add, Mul and Pow contractions
    """
    # Flip the module-level switch consulted by the expression builders.
    global _evaluate
    _evaluate = True
|
wirepas_backend_client | wirepas_backend_client//api/influx/connectors.pyclass:Influx/_decode_array | @staticmethod
def _decode_array(payload: str, elements: dict) ->list:
"""
Maps the elements of an array present in the payload string
Args:
payload (str): An influx WM message
elements (dict): A dictionary of elements to look for
Returns:
An array with named fields as dictionary
"""
payload = payload.replace('[', '').replace(']', '')
payload = payload.split(',')
array = list()
target = dict()
for entry in payload:
values = entry.split(':')
for _type, _convertion in elements.items():
if _type in values[0]:
target[_type] = _convertion['base'](''.join(filter(lambda c:
c not in "{}'", values[1])))
break
if len(target.keys()) == len(elements.keys()):
array.append(target.copy())
target = dict()
return array
|
campdown | campdown//helpers.pyfile:/helpers.py:function:valid_url/valid_url | def valid_url(url):
    """
    Validate a URL and make sure that it has the correct URL syntax.
    Args:
        url (str): URL string to be evaluated.
    Returns:
        True if the URL is valid. False if it is invalid.
    """
    # NOTE(review): substring test accepts a scheme appearing anywhere in the
    # string (e.g. 'xhttp://...'); str.startswith would be stricter -- confirm.
    if 'http://' not in url and 'https://' not in url:
        return False
    return True
|
iparser-0.1.8 | iparser-0.1.8//iparser/parser/common/utils.pyfile:/iparser/parser/common/utils.py:function:LSTM/LSTM | def LSTM(lstm, inputs, batch_size=None, dropout_x=0.0, dropout_h=0.0):
    """
    unidirectional LSTM
    :param lstm: one LSTM layer
    :param inputs: # seq_len x batch_size
    :param batch_size:
    :param dropout_x: input dropout rate
    :param dropout_h: recurrent (hidden-state) dropout rate
    :return: Output of LSTM layer, seq_len x hidden_dim x batch_size
    """
    s = lstm.initial_state()
    lstm.set_dropouts(dropout_x, dropout_h)
    if batch_size is not None:
        # Dropout masks appear to need re-sampling per batch size -- TODO confirm
        # against the dynet RNN builder API.
        lstm.set_dropout_masks(batch_size)
    hs = s.add_inputs(inputs)
    return hs
|
PyEIS | PyEIS//PyEIS_Lin_KK.pyfile:/PyEIS_Lin_KK.py:function:KK_RC47_fit/KK_RC47_fit | def KK_RC47_fit(params, w, t_values):
    """
    Kramers-Kronig Function: -RC-
    Kristian B. Knudsen ([email protected] / [email protected])

    Series resistance Rs plus 47 parallel RC elements: each term is
    R_k / (1 + j*w*tau_k), with tau_k taken from ``t_values[k-1]`` and the
    resistances read from the lmfit-style ``params`` mapping.
    """
    Rs = params['Rs']
    R1 = params['R1']
    R2 = params['R2']
    R3 = params['R3']
    R4 = params['R4']
    R5 = params['R5']
    R6 = params['R6']
    R7 = params['R7']
    R8 = params['R8']
    R9 = params['R9']
    R10 = params['R10']
    R11 = params['R11']
    R12 = params['R12']
    R13 = params['R13']
    R14 = params['R14']
    R15 = params['R15']
    R16 = params['R16']
    R17 = params['R17']
    R18 = params['R18']
    R19 = params['R19']
    R20 = params['R20']
    R21 = params['R21']
    R22 = params['R22']
    R23 = params['R23']
    R24 = params['R24']
    R25 = params['R25']
    R26 = params['R26']
    R27 = params['R27']
    R28 = params['R28']
    R29 = params['R29']
    R30 = params['R30']
    R31 = params['R31']
    R32 = params['R32']
    R33 = params['R33']
    R34 = params['R34']
    R35 = params['R35']
    R36 = params['R36']
    R37 = params['R37']
    R38 = params['R38']
    R39 = params['R39']
    R40 = params['R40']
    R41 = params['R41']
    R42 = params['R42']
    R43 = params['R43']
    R44 = params['R44']
    R45 = params['R45']
    R46 = params['R46']
    R47 = params['R47']
    # Complex impedance of the full -RC- ladder (auto-generated expression).
    return Rs + R1 / (1 + w * 1.0j * t_values[0]) + R2 / (1 + w * 1.0j *
        t_values[1]) + R3 / (1 + w * 1.0j * t_values[2]) + R4 / (1 + w *
        1.0j * t_values[3]) + R5 / (1 + w * 1.0j * t_values[4]) + R6 / (1 +
        w * 1.0j * t_values[5]) + R7 / (1 + w * 1.0j * t_values[6]) + R8 / (
        1 + w * 1.0j * t_values[7]) + R9 / (1 + w * 1.0j * t_values[8]
        ) + R10 / (1 + w * 1.0j * t_values[9]) + R11 / (1 + w * 1.0j *
        t_values[10]) + R12 / (1 + w * 1.0j * t_values[11]) + R13 / (1 + w *
        1.0j * t_values[12]) + R14 / (1 + w * 1.0j * t_values[13]) + R15 / (
        1 + w * 1.0j * t_values[14]) + R16 / (1 + w * 1.0j * t_values[15]
        ) + R17 / (1 + w * 1.0j * t_values[16]) + R18 / (1 + w * 1.0j *
        t_values[17]) + R19 / (1 + w * 1.0j * t_values[18]) + R20 / (1 + w *
        1.0j * t_values[19]) + R21 / (1 + w * 1.0j * t_values[20]) + R22 / (
        1 + w * 1.0j * t_values[21]) + R23 / (1 + w * 1.0j * t_values[22]
        ) + R24 / (1 + w * 1.0j * t_values[23]) + R25 / (1 + w * 1.0j *
        t_values[24]) + R26 / (1 + w * 1.0j * t_values[25]) + R27 / (1 + w *
        1.0j * t_values[26]) + R28 / (1 + w * 1.0j * t_values[27]) + R29 / (
        1 + w * 1.0j * t_values[28]) + R30 / (1 + w * 1.0j * t_values[29]
        ) + R31 / (1 + w * 1.0j * t_values[30]) + R32 / (1 + w * 1.0j *
        t_values[31]) + R33 / (1 + w * 1.0j * t_values[32]) + R34 / (1 + w *
        1.0j * t_values[33]) + R35 / (1 + w * 1.0j * t_values[34]) + R36 / (
        1 + w * 1.0j * t_values[35]) + R37 / (1 + w * 1.0j * t_values[36]
        ) + R38 / (1 + w * 1.0j * t_values[37]) + R39 / (1 + w * 1.0j *
        t_values[38]) + R40 / (1 + w * 1.0j * t_values[39]) + R41 / (1 + w *
        1.0j * t_values[40]) + R42 / (1 + w * 1.0j * t_values[41]) + R43 / (
        1 + w * 1.0j * t_values[42]) + R44 / (1 + w * 1.0j * t_values[43]
        ) + R45 / (1 + w * 1.0j * t_values[44]) + R46 / (1 + w * 1.0j *
        t_values[45]) + R47 / (1 + w * 1.0j * t_values[46])
|
enablebanking | enablebanking//models/coop_pank_connector_settings.pyclass:CoopPankConnectorSettings/__repr__ | def __repr__(A):
    """For `print` and `pprint`"""
    # Delegates to the generated to_str() serialisation of the model.
    return A.to_str()
|
das7pad-dns-lexicon-3.3.12 | das7pad-dns-lexicon-3.3.12//lexicon/providers/zeit.pyfile:/lexicon/providers/zeit.py:function:provider_parser/provider_parser | def provider_parser(subparser):
    """Configure provider parser for Zeit"""
    # User-facing CLI help text shown by argparse.
    subparser.description = """
    Zeit Provider requires a token to access its API.
    You can generate one for your account on the following URL:
    https://zeit.co/account/tokens"""
    subparser.add_argument('--auth-token', help='specify your API token')
|
histogrammar | histogrammar//hgawk_grammar.pyfile:/hgawk_grammar.py:function:p_compound_stmt_7/p_compound_stmt_7 | def p_compound_stmt_7(p):
    """compound_stmt : classdef"""
    # PLY grammar action: the docstring above IS the production rule and must
    # not be edited; the classdef node is wrapped in a one-element list.
    p[0] = [p[1]]
|
otmt-1.0.1a3 | otmt-1.0.1a3//otmt/timemap_measures.pyfile:/otmt/timemap_measures.py:function:compute_scores_on_distance_measure/compute_scores_on_distance_measure | def compute_scores_on_distance_measure(first_data, memento_data,
    distance_function):
    """Calculates the distance between scores for those measures that use
    functions from the distance library.
    """
    score = None
    if len(memento_data) == 0:
        if len(first_data) == 0:
            # Both inputs empty: identical, so the distance is zero.
            score = 0
        else:
            score = distance_function(first_data, memento_data)
    else:
        score = distance_function(first_data, memento_data)
    return score
|
graphql-example-0.4.4 | graphql-example-0.4.4//vendor/requests/utils.pyfile:/vendor/requests/utils.py:function:iter_slices/iter_slices | def iter_slices(string, slice_length):
    """Iterate over slices of a string."""
    pos = 0
    # None or a non-positive length means "yield the whole string at once".
    if slice_length is None or slice_length <= 0:
        slice_length = len(string)
    while pos < len(string):
        yield string[pos:pos + slice_length]
        pos += slice_length
|
py_ball | py_ball//salaries.pyfile:/salaries.py:function:historical_player_salary_values/historical_player_salary_values | def historical_player_salary_values(html_text):
    """ historical_player_salary_values returns a dictionary of
    salary information keyed by Hoopshype player ID
    @param **html_text** (*str*): String of the HTML response
    from a Hoopshype team URL
    Returns:
        **player_salaries** (*dict*): Dictionary keyed by Hoopshype
        player ID with a list of salary and inflation-adjusted
        salary (followed by player salary URL) as values
    """
    # Markers used to locate attribute values in the raw HTML.
    value_str = 'data-value="'
    option_str = 'style="'
    player_salaries = {}
    url_val = html_text.find('<a')
    total_val = html_text.find('class="name">Totals</td>')
    # Walk the page one player anchor at a time until the Totals row.
    while url_val < total_val:
        player_list = []
        start_ind = html_text.find('<a')
        end_ind = html_text.find('</a>')
        play_key = html_text[start_ind + 2:end_ind]
        play_key = play_key[play_key.find('href="') + 6:play_key.find('">\n')]
        player_key = play_key.split('/')[-3]
        html_text = html_text[end_ind + 4:]
        # Two salary columns per player: nominal and inflation-adjusted.
        for col_count in range(0, 2):
            start_ind = html_text.find(option_str) + len(option_str)
            end_ind = html_text.find('" ')
            html_text = html_text[end_ind + 2:]
            start_ind = html_text.find(value_str) + len(value_str)
            end_ind = html_text.find('">')
            try:
                salary_value = int(html_text[start_ind:end_ind])
            except:
                # NOTE(review): bare except silently maps unparsable values to 0.
                salary_value = 0
            player_list.append(salary_value)
            html_text = html_text[end_ind + 2:]
        player_list.append(play_key)
        player_salaries[player_key] = {}
        player_salaries[player_key]['salary'] = player_list
        total_val = html_text.find('class="name">Totals</td>')
        url_val = html_text.find('<a')
    return player_salaries
|
algorithms | algorithms//asa.pyfile:/asa.py:function:_get_consecutive_portions_of_front/_get_consecutive_portions_of_front | def _get_consecutive_portions_of_front(front):
    """
    Yields lists of the form [(f, s), (f, s)], one at a time from the given front (which is a list of the same form),
    such that each list yielded is consecutive in frequency.
    """
    last_f = None
    ls = []
    for f, s in front:
        # A gap in frequency ends the current consecutive run.
        if last_f is not None and f != last_f + 1:
            yield ls
            ls = []
        ls.append((f, s))
        last_f = f
    # Emit the final run (an empty list when `front` is empty).
    yield ls
|
mafipy | mafipy//replication/replication_method.pyfile:/replication/replication_method.py:function:linear_annuity_mapping_fprime/linear_annuity_mapping_fprime | def linear_annuity_mapping_fprime(underlying, alpha0, alpha1):
    """linear_annuity_mapping_fprime
    first derivative of linear annuity mapping function.
    See :py:func:`linear_annuity_mapping_func`.
    The function calculates following formula:
    .. math::
        \\alpha^{\\prime}(S) := \\alpha_{0.}
    where
    :math:`S` is underlying,
    :math:`\\alpha_{0}` is alpha0.
    :param float underlying:
    :param float alpha0:
    :param float alpha1: not used.
    :return: value of first derivative of linear annuity mapping function.
    :rtype: float.
    """
    # d/dS of (alpha0 * S + alpha1) is the constant alpha0.
    return alpha0
|
bpy | bpy//ops/mesh.pyfile:/ops/mesh.py:function:rip_edge_move/rip_edge_move | def rip_edge_move(MESH_OT_rip_edge=None, TRANSFORM_OT_translate=None):
    """Extend vertices and move the result
    :param MESH_OT_rip_edge: Extend Vertices, Extend vertices along the edge closest to the cursor
    :param TRANSFORM_OT_translate: Move, Move selected items
    """
    # Auto-generated stub for the Blender operator macro; the implementation lives in C.
    pass
|
whratio-3.1.1 | whratio-3.1.1//whratio/ratio.pyfile:/whratio/ratio.py:function:get_gcd/get_gcd | def get_gcd(a, b):
    """Return greatest common divisor for a and b (Euclid's algorithm)."""
    while a:
        a, b = b % a, a
    return b
|
pdm_utils-0.3.0 | pdm_utils-0.3.0//src/pdm_utils/functions/basic.pyfile:/src/pdm_utils/functions/basic.py:function:identify_two_list_duplicates/identify_two_list_duplicates | def identify_two_list_duplicates(item1_list, item2_list):
    """Identify duplicate items between two lists.
    :param item1_list: The first input list to be checked.
    :type item1_list: list
    :param item2_list: The second input list to be checked.
    :type item2_list: list
    :returns:
        The set of non-unique/duplicated items between the two lists
        (but not duplicate items within each list).
    :rtype: set
    """
    item1_set = set(item1_list)
    item2_set = set(item2_list)
    # Intersection: items present in both lists; within-list repeats are ignored.
    item3_set = item1_set & item2_set
    return item3_set
|
p4p-3.4.2 | p4p-3.4.2//src/p4p/asLib/yacc.pyfile:/src/p4p/asLib/yacc.py:function:p_head/p_head | def p_head(p):
    """uag_ref : UAG '(' string_list ')'
    hag_ref : HAG '(' string_list ')'
    calc_head : CALC '(' STRING ')'
    """
    # PLY action shared by three productions (docstring IS the grammar and
    # must not be edited); the result is a (keyword, contents) tuple.
    p[0] = p[1], p[3]
|
region | region//util.pyfile:/util.py:function:make_move/make_move | def make_move(moving_area, new_label, labels):
    """
    Modify the `labels` argument in place (no return value!) such that the
    area `moving_area` has the new region label `new_label`.
    Parameters
    ----------
    moving_area :
        The area to be moved (assigned to a new region).
    new_label : `int`
        The new region label of area `moving_area`.
    labels : :class:`numpy.ndarray`
        Each element is a region label of the area corresponding array index.
    Examples
    --------
    >>> import numpy as np
    >>> labels = np.array([0, 0, 0, 0, 1, 1])
    >>> make_move(3, 1, labels)
    >>> (labels == np.array([0, 0, 0, 1, 1, 1])).all()
    True
    """
    # In-place relabel; intentionally returns None.
    labels[moving_area] = new_label
|
blib-3 | blib-3//bl/url.pyfile:/bl/url.py:function:strip_html/strip_html | def strip_html(text):
    """Strip HTML markup from text; bare URLs are returned unchanged."""
    # A string starting with 'http' is treated as a URL, not HTML.
    if text.startswith('http'):
        return text
    import bs4
    soup = bs4.BeautifulSoup(text, 'lxml')
    res = ''
    # Concatenate every text node, separated by single spaces.
    for chunk in soup.findAll(text=True):
        if isinstance(chunk, bs4.CData):
            res += str(chunk.content[0]) + ' '
        else:
            res += str(chunk) + ' '
    return res
|
dropbox | dropbox//team_log.pyclass:EventType/member_space_limits_change_policy | @classmethod
def member_space_limits_change_policy(cls, val):
    """
    Create an instance of this class set to the
    ``member_space_limits_change_policy`` tag with value ``val``.
    :param MemberSpaceLimitsChangePolicyType val:
    :rtype: EventType
    """
    tag = 'member_space_limits_change_policy'
    return cls(tag, val)
|
wicc-1.0.0 | wicc-1.0.0//wicc/cryptos/deterministic.pyfile:/wicc/cryptos/deterministic.py:function:parse_bip32_path/parse_bip32_path | def parse_bip32_path(path):
    """Takes bip32 path, "m/0'/2H" or "m/0H/1/2H/2/1000000000.pub", returns list of ints """
    # NOTE(review): lstrip/rstrip strip *character sets*, not prefixes --
    # a path whose first segment starts with 'm' or '/' chars could be eaten.
    path = path.lstrip('m/').rstrip('.pub')
    if not path:
        return []
    patharr = []
    for v in path.split('/'):
        if not v:
            continue
        elif v[-1] in "'H":
            # Hardened index: set the top bit (0x80000000).
            v = int(v[:-1]) | 2147483648
        else:
            # Non-hardened index: mask to the low 31 bits.
            v = int(v) & 2147483647
        patharr.append(v)
    return patharr
|
noworkflow-1.12.0 | noworkflow-1.12.0//noworkflow/now/persistence/models/trial_prolog.pyclass:TrialProlog/init_cli | @classmethod
def init_cli(cls):
    """Initialize the SWI-Prolog engine if it was not initialized yet.

    Idempotent: a truthy ``cls.prolog_cli`` is reused as-is.
    """
    # Imported here, presumably to avoid a circular import at module load time.
    from . import Trial
    if not cls.prolog_cli:
        from pyswip import Prolog
        cls.prolog_cli = Prolog()
        # [:-1] drops the final character of the clause text, apparently the
        # trailing '.' that assertz does not want -- TODO confirm.
        cls.prolog_cli.assertz(Trial.prolog_description.empty()[:-1])
        cls.prolog_cli.assertz('load_rules(0)')
|
konfiture-0.3.0 | konfiture-0.3.0//grammalecte/fr/gc_engine.pyfile:/grammalecte/fr/gc_engine.py:function:_rewrite/_rewrite | def _rewrite(s, sRepl, iGroup, m, bUppercase):
    """text processor: write sRepl in s at iGroup position,
    padding with spaces so the overall string length is preserved"""
    nLen = m.end(iGroup) - m.start(iGroup)
    if sRepl == '*':
        # Blank the whole group out with spaces.
        sNew = ' ' * nLen
    elif sRepl == '>' or sRepl == '_' or sRepl == '~':
        # Keep the marker character, pad the rest with spaces.
        sNew = sRepl + ' ' * (nLen - 1)
    elif sRepl == '@':
        sNew = '@' * nLen
    elif sRepl[0:1] == '=':
        # '=name' calls the module-level function `name` to compute the text.
        sNew = globals()[sRepl[1:]](s, m)
        sNew = sNew + ' ' * (nLen - len(sNew))
        if bUppercase and m.group(iGroup)[0:1].isupper():
            sNew = sNew.capitalize()
    else:
        # Regex template expansion (may reference capture groups).
        sNew = m.expand(sRepl)
        sNew = sNew + ' ' * (nLen - len(sNew))
    return s[0:m.start(iGroup)] + sNew + s[m.end(iGroup):]
|
End of preview. Expand
in Dataset Viewer.
No dataset card yet
New: Create and edit this dataset card directly on the website!
Contribute a Dataset Card- Downloads last month
- 18