Dataset columns (type and observed value lengths):

  repo              string    lengths 7 to 55
  path              string    lengths 4 to 127
  func_name         string    lengths 1 to 88
  original_string   string    lengths 75 to 19.8k
  language          class     1 distinct value
  code              string    lengths 75 to 19.8k
  code_tokens       sequence
  docstring         string    lengths 3 to 17.3k
  docstring_tokens  sequence
  sha               string    lengths 40 to 40
  url               string    lengths 87 to 242
  partition         class     1 distinct value
awickert/gFlex
gflex/base.py
Flexure.SAS
python
def SAS(self):
    """
    Set-up for the rectangularly-gridded superposition of analytical
    solutions method for solving flexure
    """
    if self.x is None:
        self.x = np.arange(self.dx/2., self.dx * self.qs.shape[0], self.dx)
    if self.filename:
        # Define the (scalar) elastic thickness
        self.Te = self.configGet("float", "input", "ElasticThickness")
        # Define a stress-based qs = q0
        self.qs = self.q0.copy()
        # Remove self.q0 to avoid issues with multiply-defined inputs
        # q0 is the parsable input to either a qs grid or contains (x,(y),q)
        del self.q0
    if self.dimension == 2:
        if self.y is None:
            self.y = np.arange(self.dy/2., self.dy * self.qs.shape[0], self.dy)
        # Define a stress-based qs = q0
        # But only if the latter has not already been defined
        # (e.g., by the getters and setters)
        try:
            self.qs
        except:
            self.qs = self.q0.copy()
            # Remove self.q0 to avoid issues with multiply-defined inputs
            # q0 is the parsable input to either a qs grid or contains (x,(y),q)
            del self.q0
    from scipy.special import kei
[ "def", "SAS", "(", "self", ")", ":", "if", "self", ".", "x", "is", "None", ":", "self", ".", "x", "=", "np", ".", "arange", "(", "self", ".", "dx", "/", "2.", ",", "self", ".", "dx", "*", "self", ".", "qs", ".", "shape", "[", "0", "]", ",", "self", ".", "dx", ")", "if", "self", ".", "filename", ":", "self", ".", "Te", "=", "self", ".", "configGet", "(", "\"float\"", ",", "\"input\"", ",", "\"ElasticThickness\"", ")", "self", ".", "qs", "=", "self", ".", "q0", ".", "copy", "(", ")", "del", "self", ".", "q0", "if", "self", ".", "dimension", "==", "2", ":", "if", "self", ".", "y", "is", "None", ":", "self", ".", "y", "=", "np", ".", "arange", "(", "self", ".", "dy", "/", "2.", ",", "self", ".", "dy", "*", "self", ".", "qs", ".", "shape", "[", "0", "]", ",", "self", ".", "dy", ")", "try", ":", "self", ".", "qs", "except", ":", "self", ".", "qs", "=", "self", ".", "q0", ".", "copy", "(", ")", "del", "self", ".", "q0", "from", "scipy", ".", "special", "import", "kei" ]
Set-up for the rectangularly-gridded superposition of analytical solutions method for solving flexure
[ "Set", "-", "up", "for", "the", "rectangularly", "-", "gridded", "superposition", "of", "analytical", "solutions", "method", "for", "solving", "flexure" ]
3ac32249375b0f8d342a142585d86ea4d905a5a0
https://github.com/awickert/gFlex/blob/3ac32249375b0f8d342a142585d86ea4d905a5a0/gflex/base.py#L1017-L1045
train
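The SAS record above imports scipy.special.kei, the Kelvin function that underlies the analytical point-load flexure response being superposed. A minimal standalone sketch of that response follows; it is not gFlex's own API, and every parameter value and variable name below is an assumption chosen purely for illustration.

import numpy as np
from scipy.special import kei

# Illustrative constants (assumptions, not values taken from gFlex)
E, nu, Te = 65e9, 0.25, 20e3            # Young's modulus [Pa], Poisson's ratio, elastic thickness [m]
drho, g = 3300.0 - 1000.0, 9.8          # density contrast [kg/m^3] and gravity [m/s^2]
q = 1e12                                # point load [N]

D = E * Te**3 / (12 * (1 - nu**2))      # flexural rigidity
alpha = (D / (drho * g))**0.25          # flexural parameter

# Deflection profile around a single point load (sign convention varies by
# reference); a gridded SAS method superposes one such response per loaded cell.
r = np.linspace(1.0, 5 * alpha, 200)    # radial distance from the load [m]
w = q * alpha**2 / (2 * np.pi * D) * kei(r / alpha)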
awickert/gFlex
gflex/base.py
Flexure.SAS_NG
python
def SAS_NG(self):
    """
    Set-up for the ungridded superposition of analytical solutions
    method for solving flexure
    """
    if self.filename:
        # Define the (scalar) elastic thickness
        self.Te = self.configGet("float", "input", "ElasticThickness")
        # See if it wants to be run in lat/lon
        # Could put under in 2D if-statement, but could imagine an eventual desire
        # to change this and have 1D lat/lon profiles as well.
        # So while the options will be under "numerical2D", this place here will
        # remain held for an eventual future.
        self.latlon = self.configGet("string", "numerical2D", "latlon", optional=True)
        self.PlanetaryRadius = self.configGet("float", "numerical2D", "PlanetaryRadius", optional=True)

    if self.dimension == 2:
        from scipy.special import kei

    # Parse out input q0 into variables of imoprtance for solution
    if self.dimension == 1:
        try:
            # If these have already been set, e.g., by getters/setters, great!
            self.x
            self.q
        except:
            # Using [x, y, w] configuration file
            if self.q0.shape[1] == 2:
                self.x = self.q0[:,0]
                self.q = self.q0[:,1]
            else:
                sys.exit("For 1D (ungridded) SAS_NG configuration file, need [x,w] array. Your dimensions are: "+str(self.q0.shape))
    else:
        try:
            # If these have already been set, e.g., by getters/setters, great!
            self.x
            self.u
            self.q
        except:
            # Using [x, y, w] configuration file
            if self.q0.shape[1] == 3:
                self.x = self.q0[:,0]
                self.y = self.q0[:,1]
                self.q = self.q0[:,2]
            else:
                sys.exit("For 2D (ungridded) SAS_NG configuration file, need [x,y,w] array. Your dimensions are: "+str(self.q0.shape))

    # x, y are in absolute coordinates. Create a local grid reference to
    # these. This local grid, which starts at (0,0), is defined just so that
    # we have a way of running the model without defined real-world
    # coordinates
    self.x = self.x
    if self.dimension == 2:
        self.y = self.y

    # Remove self.q0 to avoid issues with multiply-defined inputs
    # q0 is the parsable input to either a qs grid or contains (x,(y),q)
    del self.q0

    # Check if a seperate output set of x,y points has been defined
    # otherwise, set those values to None
    # First, try to load the arrays
    try:
        self.xw
    except:
        try:
            self.xw = self.configGet('string', "input", "xw", optional=True)
            if self.xw == '':
                self.xw = None
        except:
            self.xw = None
    # If strings, load arrays
    if type(self.xw) == str:
        self.xw = self.loadFile(self.xw)
    if self.dimension == 2:
        try:
            # already set by setter?
            self.yw
        except:
            try:
                self.yw = self.configGet('string', "input", "yw", optional=True)
                if self.yw == '':
                    self.yw = None
            except:
                self.yw = None
        # At this point, can check if we have both None or both defined
        if (self.xw is not None and self.yw is None) \
            or (self.xw is None and self.yw is not None):
            sys.exit("SAS_NG output at specified points requires both xw and yw to be defined")
        # All right, now just finish defining
        if type(self.yw) == str:
            self.yw = self.loadFile(self.yw)
        elif self.yw is None:
            self.yw = self.y.copy()
    if self.xw is None:
        self.xw = self.x.copy()
[ "def", "SAS_NG", "(", "self", ")", ":", "if", "self", ".", "filename", ":", "self", ".", "Te", "=", "self", ".", "configGet", "(", "\"float\"", ",", "\"input\"", ",", "\"ElasticThickness\"", ")", "self", ".", "latlon", "=", "self", ".", "configGet", "(", "\"string\"", ",", "\"numerical2D\"", ",", "\"latlon\"", ",", "optional", "=", "True", ")", "self", ".", "PlanetaryRadius", "=", "self", ".", "configGet", "(", "\"float\"", ",", "\"numerical2D\"", ",", "\"PlanetaryRadius\"", ",", "optional", "=", "True", ")", "if", "self", ".", "dimension", "==", "2", ":", "from", "scipy", ".", "special", "import", "kei", "if", "self", ".", "dimension", "==", "1", ":", "try", ":", "self", ".", "x", "self", ".", "q", "except", ":", "if", "self", ".", "q0", ".", "shape", "[", "1", "]", "==", "2", ":", "self", ".", "x", "=", "self", ".", "q0", "[", ":", ",", "0", "]", "self", ".", "q", "=", "self", ".", "q0", "[", ":", ",", "1", "]", "else", ":", "sys", ".", "exit", "(", "\"For 1D (ungridded) SAS_NG configuration file, need [x,w] array. Your dimensions are: \"", "+", "str", "(", "self", ".", "q0", ".", "shape", ")", ")", "else", ":", "try", ":", "self", ".", "x", "self", ".", "u", "self", ".", "q", "except", ":", "if", "self", ".", "q0", ".", "shape", "[", "1", "]", "==", "3", ":", "self", ".", "x", "=", "self", ".", "q0", "[", ":", ",", "0", "]", "self", ".", "y", "=", "self", ".", "q0", "[", ":", ",", "1", "]", "self", ".", "q", "=", "self", ".", "q0", "[", ":", ",", "2", "]", "else", ":", "sys", ".", "exit", "(", "\"For 2D (ungridded) SAS_NG configuration file, need [x,y,w] array. Your dimensions are: \"", "+", "str", "(", "self", ".", "q0", ".", "shape", ")", ")", "self", ".", "x", "=", "self", ".", "x", "if", "self", ".", "dimension", "==", "2", ":", "self", ".", "y", "=", "self", ".", "y", "del", "self", ".", "q0", "try", ":", "self", ".", "xw", "except", ":", "try", ":", "self", ".", "xw", "=", "self", ".", "configGet", "(", "'string'", ",", "\"input\"", ",", "\"xw\"", ",", "optional", "=", "True", ")", "if", "self", ".", "xw", "==", "''", ":", "self", ".", "xw", "=", "None", "except", ":", "self", ".", "xw", "=", "None", "if", "type", "(", "self", ".", "xw", ")", "==", "str", ":", "self", ".", "xw", "=", "self", ".", "loadFile", "(", "self", ".", "xw", ")", "if", "self", ".", "dimension", "==", "2", ":", "try", ":", "self", ".", "yw", "except", ":", "try", ":", "self", ".", "yw", "=", "self", ".", "configGet", "(", "'string'", ",", "\"input\"", ",", "\"yw\"", ",", "optional", "=", "True", ")", "if", "self", ".", "yw", "==", "''", ":", "self", ".", "yw", "=", "None", "except", ":", "self", ".", "yw", "=", "None", "if", "(", "self", ".", "xw", "is", "not", "None", "and", "self", ".", "yw", "is", "None", ")", "or", "(", "self", ".", "xw", "is", "None", "and", "self", ".", "yw", "is", "not", "None", ")", ":", "sys", ".", "exit", "(", "\"SAS_NG output at specified points requires both xw and yw to be defined\"", ")", "if", "type", "(", "self", ".", "yw", ")", "==", "str", ":", "self", ".", "yw", "=", "self", ".", "loadFile", "(", "self", ".", "yw", ")", "elif", "self", ".", "yw", "is", "None", ":", "self", ".", "yw", "=", "self", ".", "y", ".", "copy", "(", ")", "if", "self", ".", "xw", "is", "None", ":", "self", ".", "xw", "=", "self", ".", "x", ".", "copy", "(", ")" ]
Set-up for the ungridded superposition of analytical solutions method for solving flexure
[ "Set", "-", "up", "for", "the", "ungridded", "superposition", "of", "analytical", "solutions", "method", "for", "solving", "flexure" ]
3ac32249375b0f8d342a142585d86ea4d905a5a0
https://github.com/awickert/gFlex/blob/3ac32249375b0f8d342a142585d86ea4d905a5a0/gflex/base.py#L1047-L1138
train
bionikspoon/pureyaml
pureyaml/_compat/singledispatch.py
_c3_mro
python
def _c3_mro(cls, abcs=None):
    """Computes the method resolution order using extended C3 linearization.

    If no *abcs* are given, the algorithm works exactly like the built-in C3
    linearization used for method resolution.

    If given, *abcs* is a list of abstract base classes that should be inserted
    into the resulting MRO. Unrelated ABCs are ignored and don't end up in the
    result. The algorithm inserts ABCs where their functionality is introduced,
    i.e. issubclass(cls, abc) returns True for the class itself but returns
    False for all its direct base classes. Implicit ABCs for a given class
    (either registered or inferred from the presence of a special method like
    __len__) are inserted directly after the last ABC explicitly listed in the
    MRO of said class. If two implicit ABCs end up next to each other in the
    resulting MRO, their ordering depends on the order of types in *abcs*.
    """
    for i, base in enumerate(reversed(cls.__bases__)):
        if hasattr(base, '__abstractmethods__'):
            boundary = len(cls.__bases__) - i
            break  # Bases up to the last explicit ABC are considered first.
    else:
        boundary = 0

    abcs = list(abcs) if abcs else []
    explicit_bases = list(cls.__bases__[:boundary])
    abstract_bases = []
    other_bases = list(cls.__bases__[boundary:])
    for base in abcs:
        if issubclass(cls, base) and not any(issubclass(b, base) for b in cls.__bases__):
            # If *cls* is the class that introduces behaviour described by
            # an ABC *base*, insert said ABC to its MRO.
            abstract_bases.append(base)
    for base in abstract_bases:
        abcs.remove(base)
    explicit_c3_mros = [_c3_mro(base, abcs=abcs) for base in explicit_bases]
    abstract_c3_mros = [_c3_mro(base, abcs=abcs) for base in abstract_bases]
    other_c3_mros = [_c3_mro(base, abcs=abcs) for base in other_bases]
    return _c3_merge(
        [[cls]] + explicit_c3_mros + abstract_c3_mros + other_c3_mros + [explicit_bases] + [abstract_bases] + [
            other_bases])
[ "def", "_c3_mro", "(", "cls", ",", "abcs", "=", "None", ")", ":", "for", "i", ",", "base", "in", "enumerate", "(", "reversed", "(", "cls", ".", "__bases__", ")", ")", ":", "if", "hasattr", "(", "base", ",", "'__abstractmethods__'", ")", ":", "boundary", "=", "len", "(", "cls", ".", "__bases__", ")", "-", "i", "break", "else", ":", "boundary", "=", "0", "abcs", "=", "list", "(", "abcs", ")", "if", "abcs", "else", "[", "]", "explicit_bases", "=", "list", "(", "cls", ".", "__bases__", "[", ":", "boundary", "]", ")", "abstract_bases", "=", "[", "]", "other_bases", "=", "list", "(", "cls", ".", "__bases__", "[", "boundary", ":", "]", ")", "for", "base", "in", "abcs", ":", "if", "issubclass", "(", "cls", ",", "base", ")", "and", "not", "any", "(", "issubclass", "(", "b", ",", "base", ")", "for", "b", "in", "cls", ".", "__bases__", ")", ":", "abstract_bases", ".", "append", "(", "base", ")", "for", "base", "in", "abstract_bases", ":", "abcs", ".", "remove", "(", "base", ")", "explicit_c3_mros", "=", "[", "_c3_mro", "(", "base", ",", "abcs", "=", "abcs", ")", "for", "base", "in", "explicit_bases", "]", "abstract_c3_mros", "=", "[", "_c3_mro", "(", "base", ",", "abcs", "=", "abcs", ")", "for", "base", "in", "abstract_bases", "]", "other_c3_mros", "=", "[", "_c3_mro", "(", "base", ",", "abcs", "=", "abcs", ")", "for", "base", "in", "other_bases", "]", "return", "_c3_merge", "(", "[", "[", "cls", "]", "]", "+", "explicit_c3_mros", "+", "abstract_c3_mros", "+", "other_c3_mros", "+", "[", "explicit_bases", "]", "+", "[", "abstract_bases", "]", "+", "[", "other_bases", "]", ")" ]
Computes the method resolution order using extended C3 linearization. If no *abcs* are given, the algorithm works exactly like the built-in C3 linearization used for method resolution. If given, *abcs* is a list of abstract base classes that should be inserted into the resulting MRO. Unrelated ABCs are ignored and don't end up in the result. The algorithm inserts ABCs where their functionality is introduced, i.e. issubclass(cls, abc) returns True for the class itself but returns False for all its direct base classes. Implicit ABCs for a given class (either registered or inferred from the presence of a special method like __len__) are inserted directly after the last ABC explicitly listed in the MRO of said class. If two implicit ABCs end up next to each other in the resulting MRO, their ordering depends on the order of types in *abcs*.
[ "Computes", "the", "method", "resolution", "order", "using", "extended", "C3", "linearization", "." ]
784830b907ca14525c4cecdb6ae35306f6f8a877
https://github.com/bionikspoon/pureyaml/blob/784830b907ca14525c4cecdb6ae35306f6f8a877/pureyaml/_compat/singledispatch.py#L49-L88
train
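A small, self-contained illustration of the condition the docstring above describes: C introduces __len__ itself, so issubclass(C, Sized) is true for C but for none of its direct bases, which is exactly how _c3_mro decides where an implicit ABC belongs. The class names are invented for the example.

from collections.abc import Sized

class Base(object):
    pass

class C(Base):
    def __len__(self):              # C, not Base, introduces Sized behaviour
        return 0

print(issubclass(C, Sized))                              # True (via Sized.__subclasshook__)
print(any(issubclass(b, Sized) for b in C.__bases__))    # False
# Given abcs=[Sized], the function above would therefore slot Sized into the
# MRO directly after C, yielding [C, Sized, Base, object].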
bionikspoon/pureyaml
pureyaml/_compat/singledispatch.py
singledispatch
python
def singledispatch(function):  # noqa
    """Single-dispatch generic function decorator.

    Transforms a function into a generic function, which can have different
    behaviours depending upon the type of its first argument. The decorated
    function acts as the default implementation, and additional
    implementations can be registered using the register() attribute of the
    generic function.
    """
    registry = {}
    dispatch_cache = WeakKeyDictionary()

    def ns():
        pass

    ns.cache_token = None

    # noinspection PyIncorrectDocstring
    def dispatch(cls):
        """generic_func.dispatch(cls) -> <function implementation>

        Runs the dispatch algorithm to return the best available implementation
        for the given *cls* registered on *generic_func*.
        """
        if ns.cache_token is not None:
            current_token = get_cache_token()
            if ns.cache_token != current_token:
                dispatch_cache.clear()
                ns.cache_token = current_token
        try:
            impl = dispatch_cache[cls]
        except KeyError:
            try:
                impl = registry[cls]
            except KeyError:
                impl = _find_impl(cls, registry)
            dispatch_cache[cls] = impl
        return impl

    # noinspection PyIncorrectDocstring
    def register(cls, func=None):
        """generic_func.register(cls, func) -> func

        Registers a new implementation for the given *cls* on a *generic_func*.
        """
        if func is None:
            return lambda f: register(cls, f)
        registry[cls] = func
        if ns.cache_token is None and hasattr(cls, '__abstractmethods__'):
            ns.cache_token = get_cache_token()
        dispatch_cache.clear()
        return func

    def wrapper(*args, **kw):
        return dispatch(args[0].__class__)(*args, **kw)

    registry[object] = function
    wrapper.register = register
    wrapper.dispatch = dispatch
    wrapper.registry = MappingProxyType(registry)
    wrapper._clear_cache = dispatch_cache.clear
    update_wrapper(wrapper, function)
    return wrapper
[ "def", "singledispatch", "(", "function", ")", ":", "registry", "=", "{", "}", "dispatch_cache", "=", "WeakKeyDictionary", "(", ")", "def", "ns", "(", ")", ":", "pass", "ns", ".", "cache_token", "=", "None", "def", "dispatch", "(", "cls", ")", ":", "if", "ns", ".", "cache_token", "is", "not", "None", ":", "current_token", "=", "get_cache_token", "(", ")", "if", "ns", ".", "cache_token", "!=", "current_token", ":", "dispatch_cache", ".", "clear", "(", ")", "ns", ".", "cache_token", "=", "current_token", "try", ":", "impl", "=", "dispatch_cache", "[", "cls", "]", "except", "KeyError", ":", "try", ":", "impl", "=", "registry", "[", "cls", "]", "except", "KeyError", ":", "impl", "=", "_find_impl", "(", "cls", ",", "registry", ")", "dispatch_cache", "[", "cls", "]", "=", "impl", "return", "impl", "def", "register", "(", "cls", ",", "func", "=", "None", ")", ":", "if", "func", "is", "None", ":", "return", "lambda", "f", ":", "register", "(", "cls", ",", "f", ")", "registry", "[", "cls", "]", "=", "func", "if", "ns", ".", "cache_token", "is", "None", "and", "hasattr", "(", "cls", ",", "'__abstractmethods__'", ")", ":", "ns", ".", "cache_token", "=", "get_cache_token", "(", ")", "dispatch_cache", ".", "clear", "(", ")", "return", "func", "def", "wrapper", "(", "*", "args", ",", "**", "kw", ")", ":", "return", "dispatch", "(", "args", "[", "0", "]", ".", "__class__", ")", "(", "*", "args", ",", "**", "kw", ")", "registry", "[", "object", "]", "=", "function", "wrapper", ".", "register", "=", "register", "wrapper", ".", "dispatch", "=", "dispatch", "wrapper", ".", "registry", "=", "MappingProxyType", "(", "registry", ")", "wrapper", ".", "_clear_cache", "=", "dispatch_cache", ".", "clear", "update_wrapper", "(", "wrapper", ",", "function", ")", "return", "wrapper" ]
Single-dispatch generic function decorator. Transforms a function into a generic function, which can have different behaviours depending upon the type of its first argument. The decorated function acts as the default implementation, and additional implementations can be registered using the register() attribute of the generic function.
[ "Single", "-", "dispatch", "generic", "function", "decorator", "." ]
784830b907ca14525c4cecdb6ae35306f6f8a877
https://github.com/bionikspoon/pureyaml/blob/784830b907ca14525c4cecdb6ae35306f6f8a877/pureyaml/_compat/singledispatch.py#L170-L235
train
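A hedged usage sketch of the decorator above. It mirrors the functools.singledispatch API that this backport follows, so the example simply imports the stdlib version; the function names are invented for illustration.

from functools import singledispatch

@singledispatch
def describe(obj):
    # default implementation, used when no more specific type is registered
    return "object: {!r}".format(obj)

@describe.register(int)
def _(obj):
    return "int: {}".format(obj)

@describe.register(list)
def _(obj):
    return "list of {} items".format(len(obj))

print(describe(3))         # dispatches on the type of the first argument -> "int: 3"
print(describe([1, 2]))    # -> "list of 2 items"
print(describe("hello"))   # no str implementation registered -> default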
MasterOdin/pylint_runner
pylint_runner/main.py
Runner._parse_args
python
def _parse_args(self, args):
    """Parses any supplied command-line args and provides help text. """
    parser = ArgumentParser(description="Runs pylint recursively on a directory")
    parser.add_argument(
        "-v",
        "--verbose",
        dest="verbose",
        action="store_true",
        default=False,
        help="Verbose mode (report which files were found for testing).",
    )
    parser.add_argument(
        "--rcfile",
        dest="rcfile",
        action="store",
        default=".pylintrc",
        help="A relative or absolute path to your pylint rcfile. Defaults to\
 `.pylintrc` at the current working directory",
    )
    parser.add_argument(
        "-V",
        "--version",
        action="version",
        version="%(prog)s ({0}) for Python {1}".format(__version__, PYTHON_VERSION),
    )
    options, _ = parser.parse_known_args(args)

    self.verbose = options.verbose

    if options.rcfile:
        if not os.path.isfile(options.rcfile):
            options.rcfile = os.getcwd() + "/" + options.rcfile
        self.rcfile = options.rcfile

    return options
[ "def", "_parse_args", "(", "self", ",", "args", ")", ":", "parser", "=", "ArgumentParser", "(", "description", "=", "\"Runs pylint recursively on a directory\"", ")", "parser", ".", "add_argument", "(", "\"-v\"", ",", "\"--verbose\"", ",", "dest", "=", "\"verbose\"", ",", "action", "=", "\"store_true\"", ",", "default", "=", "False", ",", "help", "=", "\"Verbose mode (report which files were found for testing).\"", ",", ")", "parser", ".", "add_argument", "(", "\"--rcfile\"", ",", "dest", "=", "\"rcfile\"", ",", "action", "=", "\"store\"", ",", "default", "=", "\".pylintrc\"", ",", "help", "=", "\"A relative or absolute path to your pylint rcfile. Defaults to\\ `.pylintrc` at the current working directory\"", ",", ")", "parser", ".", "add_argument", "(", "\"-V\"", ",", "\"--version\"", ",", "action", "=", "\"version\"", ",", "version", "=", "\"%(prog)s ({0}) for Python {1}\"", ".", "format", "(", "__version__", ",", "PYTHON_VERSION", ")", ",", ")", "options", ",", "_", "=", "parser", ".", "parse_known_args", "(", "args", ")", "self", ".", "verbose", "=", "options", ".", "verbose", "if", "options", ".", "rcfile", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "options", ".", "rcfile", ")", ":", "options", ".", "rcfile", "=", "os", ".", "getcwd", "(", ")", "+", "\"/\"", "+", "options", ".", "rcfile", "self", ".", "rcfile", "=", "options", ".", "rcfile", "return", "options" ]
Parses any supplied command-line args and provides help text.
[ "Parses", "any", "supplied", "command", "-", "line", "args", "and", "provides", "help", "text", "." ]
b8ec3324e568e172d38fc0b6fa6f5551b229de07
https://github.com/MasterOdin/pylint_runner/blob/b8ec3324e568e172d38fc0b6fa6f5551b229de07/pylint_runner/main.py#L39-L78
train
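The method above relies on ArgumentParser.parse_known_args() so that flags meant for pylint itself pass through untouched. A minimal standalone sketch of that behaviour, with invented argument values:

from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument("-v", "--verbose", action="store_true", default=False)
parser.add_argument("--rcfile", action="store", default=".pylintrc")

# Unrecognised options are returned in a leftover list instead of raising.
options, leftover = parser.parse_known_args(["-v", "--disable=C0111"])
print(options.verbose)   # True
print(options.rcfile)    # '.pylintrc' (the default)
print(leftover)          # ['--disable=C0111'] is left for other tools to consume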
MasterOdin/pylint_runner
pylint_runner/main.py
Runner._parse_ignores
python
def _parse_ignores(self):
    """ Parse the ignores setting from the pylintrc file if available. """
    error_message = (
        colorama.Fore.RED
        + "{} does not appear to be a valid pylintrc file".format(self.rcfile)
        + colorama.Fore.RESET
    )
    if not os.path.isfile(self.rcfile):
        if not self._is_using_default_rcfile():
            print(error_message)
            sys.exit(1)
        else:
            return

    config = configparser.ConfigParser()
    try:
        config.read(self.rcfile)
    except configparser.MissingSectionHeaderError:
        print(error_message)
        sys.exit(1)

    if config.has_section("MASTER") and config.get("MASTER", "ignore"):
        self.ignore_folders += config.get("MASTER", "ignore").split(",")
[ "def", "_parse_ignores", "(", "self", ")", ":", "error_message", "=", "(", "colorama", ".", "Fore", ".", "RED", "+", "\"{} does not appear to be a valid pylintrc file\"", ".", "format", "(", "self", ".", "rcfile", ")", "+", "colorama", ".", "Fore", ".", "RESET", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "self", ".", "rcfile", ")", ":", "if", "not", "self", ".", "_is_using_default_rcfile", "(", ")", ":", "print", "(", "error_message", ")", "sys", ".", "exit", "(", "1", ")", "else", ":", "return", "config", "=", "configparser", ".", "ConfigParser", "(", ")", "try", ":", "config", ".", "read", "(", "self", ".", "rcfile", ")", "except", "configparser", ".", "MissingSectionHeaderError", ":", "print", "(", "error_message", ")", "sys", ".", "exit", "(", "1", ")", "if", "config", ".", "has_section", "(", "\"MASTER\"", ")", "and", "config", ".", "get", "(", "\"MASTER\"", ",", "\"ignore\"", ")", ":", "self", ".", "ignore_folders", "+=", "config", ".", "get", "(", "\"MASTER\"", ",", "\"ignore\"", ")", ".", "split", "(", "\",\"", ")" ]
Parse the ignores setting from the pylintrc file if available.
[ "Parse", "the", "ignores", "setting", "from", "the", "pylintrc", "file", "if", "available", "." ]
b8ec3324e568e172d38fc0b6fa6f5551b229de07
https://github.com/MasterOdin/pylint_runner/blob/b8ec3324e568e172d38fc0b6fa6f5551b229de07/pylint_runner/main.py#L80-L104
train
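A minimal sketch of the pylintrc parsing shown above, using an in-memory configuration instead of a file so it runs standalone; the ignore values are invented.

import configparser

config = configparser.ConfigParser()
config.read_string("""
[MASTER]
ignore=build,dist,.venv
""")

ignore_folders = []
if config.has_section("MASTER") and config.get("MASTER", "ignore"):
    ignore_folders += config.get("MASTER", "ignore").split(",")
print(ignore_folders)   # ['build', 'dist', '.venv']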
MasterOdin/pylint_runner
pylint_runner/main.py
Runner.run
python
def run(self, output=None, error=None):
    """ Runs pylint on all python files in the current directory """
    pylint_output = output if output is not None else sys.stdout
    pylint_error = error if error is not None else sys.stderr
    savedout, savederr = sys.__stdout__, sys.__stderr__
    sys.stdout = pylint_output
    sys.stderr = pylint_error

    pylint_files = self.get_files_from_dir(os.curdir)
    self._print_line(
        "Using pylint "
        + colorama.Fore.RED
        + pylint.__version__
        + colorama.Fore.RESET
        + " for python "
        + colorama.Fore.RED
        + PYTHON_VERSION
        + colorama.Fore.RESET
    )
    self._print_line("pylint running on the following files:")
    for pylint_file in pylint_files:
        # we need to recast this as a string, else pylint enters an endless recursion
        split_file = str(pylint_file).split("/")
        split_file[-1] = colorama.Fore.CYAN + split_file[-1] + colorama.Fore.RESET
        pylint_file = "/".join(split_file)
        self._print_line("- " + pylint_file)
    self._print_line("----")

    if not self._is_using_default_rcfile():
        self.args += ["--rcfile={}".format(self.rcfile)]

    exit_kwarg = {"do_exit": False}
    run = pylint.lint.Run(self.args + pylint_files, **exit_kwarg)

    sys.stdout = savedout
    sys.stderr = savederr

    sys.exit(run.linter.msg_status)
[ "def", "run", "(", "self", ",", "output", "=", "None", ",", "error", "=", "None", ")", ":", "pylint_output", "=", "output", "if", "output", "is", "not", "None", "else", "sys", ".", "stdout", "pylint_error", "=", "error", "if", "error", "is", "not", "None", "else", "sys", ".", "stderr", "savedout", ",", "savederr", "=", "sys", ".", "__stdout__", ",", "sys", ".", "__stderr__", "sys", ".", "stdout", "=", "pylint_output", "sys", ".", "stderr", "=", "pylint_error", "pylint_files", "=", "self", ".", "get_files_from_dir", "(", "os", ".", "curdir", ")", "self", ".", "_print_line", "(", "\"Using pylint \"", "+", "colorama", ".", "Fore", ".", "RED", "+", "pylint", ".", "__version__", "+", "colorama", ".", "Fore", ".", "RESET", "+", "\" for python \"", "+", "colorama", ".", "Fore", ".", "RED", "+", "PYTHON_VERSION", "+", "colorama", ".", "Fore", ".", "RESET", ")", "self", ".", "_print_line", "(", "\"pylint running on the following files:\"", ")", "for", "pylint_file", "in", "pylint_files", ":", "split_file", "=", "str", "(", "pylint_file", ")", ".", "split", "(", "\"/\"", ")", "split_file", "[", "-", "1", "]", "=", "colorama", ".", "Fore", ".", "CYAN", "+", "split_file", "[", "-", "1", "]", "+", "colorama", ".", "Fore", ".", "RESET", "pylint_file", "=", "\"/\"", ".", "join", "(", "split_file", ")", "self", ".", "_print_line", "(", "\"- \"", "+", "pylint_file", ")", "self", ".", "_print_line", "(", "\"----\"", ")", "if", "not", "self", ".", "_is_using_default_rcfile", "(", ")", ":", "self", ".", "args", "+=", "[", "\"--rcfile={}\"", ".", "format", "(", "self", ".", "rcfile", ")", "]", "exit_kwarg", "=", "{", "\"do_exit\"", ":", "False", "}", "run", "=", "pylint", ".", "lint", ".", "Run", "(", "self", ".", "args", "+", "pylint_files", ",", "**", "exit_kwarg", ")", "sys", ".", "stdout", "=", "savedout", "sys", ".", "stderr", "=", "savederr", "sys", ".", "exit", "(", "run", ".", "linter", ".", "msg_status", ")" ]
Runs pylint on all python files in the current directory
[ "Runs", "pylint", "on", "all", "python", "files", "in", "the", "current", "directory" ]
b8ec3324e568e172d38fc0b6fa6f5551b229de07
https://github.com/MasterOdin/pylint_runner/blob/b8ec3324e568e172d38fc0b6fa6f5551b229de07/pylint_runner/main.py#L145-L184
train
bionikspoon/pureyaml
pureyaml/grammar/utils.py
strict
python
def strict(*types):
    """Decorator, type check production rule output"""

    def decorate(func):
        @wraps(func)
        def wrapper(self, p):
            func(self, p)
            if not isinstance(p[0], types):
                raise YAMLStrictTypeError(p[0], types, func)

        wrapper.co_firstlineno = func.__code__.co_firstlineno
        return wrapper

    return decorate
[ "def", "strict", "(", "*", "types", ")", ":", "def", "decorate", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "self", ",", "p", ")", ":", "func", "(", "self", ",", "p", ")", "if", "not", "isinstance", "(", "p", "[", "0", "]", ",", "types", ")", ":", "raise", "YAMLStrictTypeError", "(", "p", "[", "0", "]", ",", "types", ",", "func", ")", "wrapper", ".", "co_firstlineno", "=", "func", ".", "__code__", ".", "co_firstlineno", "return", "wrapper", "return", "decorate" ]
Decorator, type check production rule output
[ "Decorator", "type", "check", "production", "rule", "output" ]
784830b907ca14525c4cecdb6ae35306f6f8a877
https://github.com/bionikspoon/pureyaml/blob/784830b907ca14525c4cecdb6ae35306f6f8a877/pureyaml/grammar/utils.py#L11-L24
train
bionikspoon/pureyaml
pureyaml/grammar/utils.py
find_column
python
def find_column(t):
    """Get cursor position, based on previous newline"""
    pos = t.lexer.lexpos
    data = t.lexer.lexdata
    last_cr = data.rfind('\n', 0, pos)
    if last_cr < 0:
        last_cr = -1
    column = pos - last_cr
    return column
[ "def", "find_column", "(", "t", ")", ":", "pos", "=", "t", ".", "lexer", ".", "lexpos", "data", "=", "t", ".", "lexer", ".", "lexdata", "last_cr", "=", "data", ".", "rfind", "(", "'\\n'", ",", "0", ",", "pos", ")", "if", "last_cr", "<", "0", ":", "last_cr", "=", "-", "1", "column", "=", "pos", "-", "last_cr", "return", "column" ]
Get cursor position, based on previous newline
[ "Get", "cursor", "position", "based", "on", "previous", "newline" ]
784830b907ca14525c4cecdb6ae35306f6f8a877
https://github.com/bionikspoon/pureyaml/blob/784830b907ca14525c4cecdb6ae35306f6f8a877/pureyaml/grammar/utils.py#L27-L35
train
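The column arithmetic in find_column can be demonstrated without a PLY lexer object; the sample input below is invented. rfind already returns -1 when there is no earlier newline, which the explicit reset makes unmistakable, and the resulting columns are 1-based in both cases.

data = "key: value\n  nested: 1"
pos = data.index("nested")            # pretend the lexer has just reached 'nested'
last_cr = data.rfind('\n', 0, pos)    # offset of the previous newline, or -1
if last_cr < 0:
    last_cr = -1
column = pos - last_cr
print(column)   # 3 -> 'nested' starts at 1-based column 3 of its line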
jaraco/jaraco.windows
jaraco/windows/power.py
no_sleep
python
def no_sleep():
    """
    Context that prevents the computer from going to sleep.
    """
    mode = power.ES.continuous | power.ES.system_required
    handle_nonzero_success(power.SetThreadExecutionState(mode))
    try:
        yield
    finally:
        handle_nonzero_success(power.SetThreadExecutionState(power.ES.continuous))
[ "def", "no_sleep", "(", ")", ":", "mode", "=", "power", ".", "ES", ".", "continuous", "|", "power", ".", "ES", ".", "system_required", "handle_nonzero_success", "(", "power", ".", "SetThreadExecutionState", "(", "mode", ")", ")", "try", ":", "yield", "finally", ":", "handle_nonzero_success", "(", "power", ".", "SetThreadExecutionState", "(", "power", ".", "ES", ".", "continuous", ")", ")" ]
Context that prevents the computer from going to sleep.
[ "Context", "that", "prevents", "the", "computer", "from", "going", "to", "sleep", "." ]
51811efed50b46ad08daa25408a1cc806bc8d519
https://github.com/jaraco/jaraco.windows/blob/51811efed50b46ad08daa25408a1cc806bc8d519/jaraco/windows/power.py#L68-L77
train
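A hedged usage sketch for no_sleep. The yield/finally pattern above suggests the module exposes it as a context manager (e.g. via contextlib.contextmanager, not shown in this extract), and it only works on Windows with jaraco.windows installed; long_running_job is a hypothetical placeholder.

from jaraco.windows.power import no_sleep

def long_running_job():
    pass   # hypothetical work that must not be interrupted by system sleep

with no_sleep():
    # SetThreadExecutionState keeps the system awake for the duration of the
    # block; the finally clause restores the continuous state afterwards.
    long_running_job()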
theno/fabsetup
fabsetup/fabfile/setup/service/selfoss.py
selfoss
python
def selfoss(reset_password=False):
    '''Install, update and set up selfoss.

    This selfoss installation uses sqlite (selfoss-default), php5-fpm and
    nginx.

    The connection is https-only and secured by a letsencrypt certificate.
    This certificate must be created separately with task
    setup.server_letsencrypt.

    More infos:
      https://selfoss.aditu.de/
      https://github.com/SSilence/selfoss/wiki
      https://www.heise.de/ct/ausgabe/2016-13-RSS-Reader-Selfoss-hat-die-Nachrichtenlage-im-Blick-3228045.html
      https://ct.de/yqp7
    '''
    hostname = re.sub(r'^[^@]+@', '', env.host)  # without username if any
    sitename = query_input(
        question='\nEnter site-name of Your trac web service',
        default=flo('selfoss.{hostname}'))
    username = env.user
    site_dir = flo('/home/{username}/sites/{sitename}')

    checkout_latest_release_of_selfoss()
    create_directory_structure(site_dir)

    restored = install_selfoss(sitename, site_dir, username)
    nginx_site_config(username, sitename, hostname)
    enable_php5_socket_file()

    if not restored or reset_password:
        setup_selfoss_user(username, sitename, site_dir)

    print_msg('\n## reload nginx and restart php\n')
    run('sudo service nginx reload')
    run('sudo service php5-fpm restart')
[ "def", "selfoss", "(", "reset_password", "=", "False", ")", ":", "hostname", "=", "re", ".", "sub", "(", "r'^[^@]+@'", ",", "''", ",", "env", ".", "host", ")", "sitename", "=", "query_input", "(", "question", "=", "'\\nEnter site-name of Your trac web service'", ",", "default", "=", "flo", "(", "'selfoss.{hostname}'", ")", ")", "username", "=", "env", ".", "user", "site_dir", "=", "flo", "(", "'/home/{username}/sites/{sitename}'", ")", "checkout_latest_release_of_selfoss", "(", ")", "create_directory_structure", "(", "site_dir", ")", "restored", "=", "install_selfoss", "(", "sitename", ",", "site_dir", ",", "username", ")", "nginx_site_config", "(", "username", ",", "sitename", ",", "hostname", ")", "enable_php5_socket_file", "(", ")", "if", "not", "restored", "or", "reset_password", ":", "setup_selfoss_user", "(", "username", ",", "sitename", ",", "site_dir", ")", "print_msg", "(", "'\\n## reload nginx and restart php\\n'", ")", "run", "(", "'sudo service nginx reload'", ")", "run", "(", "'sudo service php5-fpm restart'", ")" ]
Install, update and set up selfoss. This selfoss installation uses sqlite (selfoss-default), php5-fpm and nginx. The connection is https-only and secured by a letsencrypt certificate. This certificate must be created separately with task setup.server_letsencrypt. More infos: https://selfoss.aditu.de/ https://github.com/SSilence/selfoss/wiki https://www.heise.de/ct/ausgabe/2016-13-RSS-Reader-Selfoss-hat-die-Nachrichtenlage-im-Blick-3228045.html https://ct.de/yqp7
[ "Install", "update", "and", "set", "up", "selfoss", "." ]
ced728abff93551ba5677e63bc1bdc0ef5ca5777
https://github.com/theno/fabsetup/blob/ced728abff93551ba5677e63bc1bdc0ef5ca5777/fabsetup/fabfile/setup/service/selfoss.py#L19-L54
train
NearHuscarl/py-currency
currency/cache.py
get_cache_path
python
def get_cache_path(filename):
    """ get file path """
    cwd = os.path.dirname(os.path.realpath(__file__))
    return os.path.join(cwd, filename)
[ "def", "get_cache_path", "(", "filename", ")", ":", "cwd", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", "return", "os", ".", "path", ".", "join", "(", "cwd", ",", "filename", ")" ]
get file path
[ "get", "file", "path" ]
4e30426399872fd6bfaa4c752a91d67c2d7bf52c
https://github.com/NearHuscarl/py-currency/blob/4e30426399872fd6bfaa4c752a91d67c2d7bf52c/currency/cache.py#L8-L11
train
jaraco/jaraco.windows
jaraco/windows/privilege.py
get_process_token
python
def get_process_token():
    """
    Get the current process token
    """
    token = wintypes.HANDLE()
    res = process.OpenProcessToken(
        process.GetCurrentProcess(), process.TOKEN_ALL_ACCESS, token)
    if not res > 0:
        raise RuntimeError("Couldn't get process token")
    return token
[ "def", "get_process_token", "(", ")", ":", "token", "=", "wintypes", ".", "HANDLE", "(", ")", "res", "=", "process", ".", "OpenProcessToken", "(", "process", ".", "GetCurrentProcess", "(", ")", ",", "process", ".", "TOKEN_ALL_ACCESS", ",", "token", ")", "if", "not", "res", ">", "0", ":", "raise", "RuntimeError", "(", "\"Couldn't get process token\"", ")", "return", "token" ]
Get the current process token
[ "Get", "the", "current", "process", "token" ]
51811efed50b46ad08daa25408a1cc806bc8d519
https://github.com/jaraco/jaraco.windows/blob/51811efed50b46ad08daa25408a1cc806bc8d519/jaraco/windows/privilege.py#L11-L20
train
jaraco/jaraco.windows
jaraco/windows/privilege.py
get_symlink_luid
python
def get_symlink_luid():
    """
    Get the LUID for the SeCreateSymbolicLinkPrivilege
    """
    symlink_luid = privilege.LUID()
    res = privilege.LookupPrivilegeValue(
        None, "SeCreateSymbolicLinkPrivilege", symlink_luid)
    if not res > 0:
        raise RuntimeError("Couldn't lookup privilege value")
    return symlink_luid
[ "def", "get_symlink_luid", "(", ")", ":", "symlink_luid", "=", "privilege", ".", "LUID", "(", ")", "res", "=", "privilege", ".", "LookupPrivilegeValue", "(", "None", ",", "\"SeCreateSymbolicLinkPrivilege\"", ",", "symlink_luid", ")", "if", "not", "res", ">", "0", ":", "raise", "RuntimeError", "(", "\"Couldn't lookup privilege value\"", ")", "return", "symlink_luid" ]
Get the LUID for the SeCreateSymbolicLinkPrivilege
[ "Get", "the", "LUID", "for", "the", "SeCreateSymbolicLinkPrivilege" ]
51811efed50b46ad08daa25408a1cc806bc8d519
https://github.com/jaraco/jaraco.windows/blob/51811efed50b46ad08daa25408a1cc806bc8d519/jaraco/windows/privilege.py#L23-L32
train
jaraco/jaraco.windows
jaraco/windows/privilege.py
get_privilege_information
python
def get_privilege_information():
    """
    Get all privileges associated with the current process.
    """
    # first call with zero length to determine what size buffer we need
    return_length = wintypes.DWORD()
    params = [
        get_process_token(),
        privilege.TOKEN_INFORMATION_CLASS.TokenPrivileges,
        None,
        0,
        return_length,
    ]

    res = privilege.GetTokenInformation(*params)

    # assume we now have the necessary length in return_length

    buffer = ctypes.create_string_buffer(return_length.value)
    params[2] = buffer
    params[3] = return_length.value

    res = privilege.GetTokenInformation(*params)
    assert res > 0, "Error in second GetTokenInformation (%d)" % res

    privileges = ctypes.cast(
        buffer, ctypes.POINTER(privilege.TOKEN_PRIVILEGES)).contents
    return privileges
[ "def", "get_privilege_information", "(", ")", ":", "return_length", "=", "wintypes", ".", "DWORD", "(", ")", "params", "=", "[", "get_process_token", "(", ")", ",", "privilege", ".", "TOKEN_INFORMATION_CLASS", ".", "TokenPrivileges", ",", "None", ",", "0", ",", "return_length", ",", "]", "res", "=", "privilege", ".", "GetTokenInformation", "(", "*", "params", ")", "buffer", "=", "ctypes", ".", "create_string_buffer", "(", "return_length", ".", "value", ")", "params", "[", "2", "]", "=", "buffer", "params", "[", "3", "]", "=", "return_length", ".", "value", "res", "=", "privilege", ".", "GetTokenInformation", "(", "*", "params", ")", "assert", "res", ">", "0", ",", "\"Error in second GetTokenInformation (%d)\"", "%", "res", "privileges", "=", "ctypes", ".", "cast", "(", "buffer", ",", "ctypes", ".", "POINTER", "(", "privilege", ".", "TOKEN_PRIVILEGES", ")", ")", ".", "contents", "return", "privileges" ]
Get all privileges associated with the current process.
[ "Get", "all", "privileges", "associated", "with", "the", "current", "process", "." ]
51811efed50b46ad08daa25408a1cc806bc8d519
https://github.com/jaraco/jaraco.windows/blob/51811efed50b46ad08daa25408a1cc806bc8d519/jaraco/windows/privilege.py#L35-L63
train
jaraco/jaraco.windows
jaraco/windows/privilege.py
enable_symlink_privilege
python
def enable_symlink_privilege():
    """
    Try to assign the symlink privilege to the current process token.
    Return True if the assignment is successful.
    """
    # create a space in memory for a TOKEN_PRIVILEGES structure
    # with one element
    size = ctypes.sizeof(privilege.TOKEN_PRIVILEGES)
    size += ctypes.sizeof(privilege.LUID_AND_ATTRIBUTES)
    buffer = ctypes.create_string_buffer(size)
    tp = ctypes.cast(buffer, ctypes.POINTER(privilege.TOKEN_PRIVILEGES)).contents
    tp.count = 1
    tp.get_array()[0].enable()
    tp.get_array()[0].LUID = get_symlink_luid()
    token = get_process_token()
    res = privilege.AdjustTokenPrivileges(token, False, tp, 0, None, None)
    if res == 0:
        raise RuntimeError("Error in AdjustTokenPrivileges")

    ERROR_NOT_ALL_ASSIGNED = 1300
    return ctypes.windll.kernel32.GetLastError() != ERROR_NOT_ALL_ASSIGNED
[ "def", "enable_symlink_privilege", "(", ")", ":", "size", "=", "ctypes", ".", "sizeof", "(", "privilege", ".", "TOKEN_PRIVILEGES", ")", "size", "+=", "ctypes", ".", "sizeof", "(", "privilege", ".", "LUID_AND_ATTRIBUTES", ")", "buffer", "=", "ctypes", ".", "create_string_buffer", "(", "size", ")", "tp", "=", "ctypes", ".", "cast", "(", "buffer", ",", "ctypes", ".", "POINTER", "(", "privilege", ".", "TOKEN_PRIVILEGES", ")", ")", ".", "contents", "tp", ".", "count", "=", "1", "tp", ".", "get_array", "(", ")", "[", "0", "]", ".", "enable", "(", ")", "tp", ".", "get_array", "(", ")", "[", "0", "]", ".", "LUID", "=", "get_symlink_luid", "(", ")", "token", "=", "get_process_token", "(", ")", "res", "=", "privilege", ".", "AdjustTokenPrivileges", "(", "token", ",", "False", ",", "tp", ",", "0", ",", "None", ",", "None", ")", "if", "res", "==", "0", ":", "raise", "RuntimeError", "(", "\"Error in AdjustTokenPrivileges\"", ")", "ERROR_NOT_ALL_ASSIGNED", "=", "1300", "return", "ctypes", ".", "windll", ".", "kernel32", ".", "GetLastError", "(", ")", "!=", "ERROR_NOT_ALL_ASSIGNED" ]
Try to assign the symlink privilege to the current process token. Return True if the assignment is successful.
[ "Try", "to", "assign", "the", "symlink", "privilege", "to", "the", "current", "process", "token", ".", "Return", "True", "if", "the", "assignment", "is", "successful", "." ]
51811efed50b46ad08daa25408a1cc806bc8d519
https://github.com/jaraco/jaraco.windows/blob/51811efed50b46ad08daa25408a1cc806bc8d519/jaraco/windows/privilege.py#L75-L95
train
jaraco/jaraco.windows
jaraco/windows/privilege.py
grant_symlink_privilege
python
def grant_symlink_privilege(who, machine=''):
    """
    Grant the 'create symlink' privilege to who.

    Based on http://support.microsoft.com/kb/132958
    """
    flags = security.POLICY_CREATE_ACCOUNT | security.POLICY_LOOKUP_NAMES
    policy = OpenPolicy(machine, flags)
    return policy
[ "def", "grant_symlink_privilege", "(", "who", ",", "machine", "=", "''", ")", ":", "flags", "=", "security", ".", "POLICY_CREATE_ACCOUNT", "|", "security", ".", "POLICY_LOOKUP_NAMES", "policy", "=", "OpenPolicy", "(", "machine", ",", "flags", ")", "return", "policy" ]
Grant the 'create symlink' privilege to who. Based on http://support.microsoft.com/kb/132958
[ "Grant", "the", "create", "symlink", "privilege", "to", "who", "." ]
51811efed50b46ad08daa25408a1cc806bc8d519
https://github.com/jaraco/jaraco.windows/blob/51811efed50b46ad08daa25408a1cc806bc8d519/jaraco/windows/privilege.py#L123-L131
train
theno/fabsetup
fabsetup/addons.py
add_tasks_r
python
def add_tasks_r(addon_module, package_module, package_name):
    '''Recursively iterate through 'package_module' and add every fabric task
    to the 'addon_module' keeping the task hierarchy.

    Args:
        addon_module(types.ModuleType)
        package_module(types.ModuleType)
        package_name(str): Required, to avoid redundant addition of tasks

    Return: None
    '''
    module_dict = package_module.__dict__
    for attr_name, attr_val in module_dict.items():
        if isinstance(attr_val, fabric.tasks.WrappedCallableTask):
            addon_module.__dict__[attr_name] = attr_val
        elif attr_name != package_name \
                and isinstance(attr_val, types.ModuleType) \
                and attr_val.__name__.startswith('fabsetup_') \
                and attr_name.split('.')[-1] != package_name:
            submodule_name = flo('{addon_module.__name__}.{attr_name}')
            submodule = get_or_create_module_r(submodule_name)
            package_module = attr_val
            add_tasks_r(submodule, package_module, package_name)
            addon_module.__dict__[attr_name] = submodule
[ "def", "add_tasks_r", "(", "addon_module", ",", "package_module", ",", "package_name", ")", ":", "module_dict", "=", "package_module", ".", "__dict__", "for", "attr_name", ",", "attr_val", "in", "module_dict", ".", "items", "(", ")", ":", "if", "isinstance", "(", "attr_val", ",", "fabric", ".", "tasks", ".", "WrappedCallableTask", ")", ":", "addon_module", ".", "__dict__", "[", "attr_name", "]", "=", "attr_val", "elif", "attr_name", "!=", "package_name", "and", "isinstance", "(", "attr_val", ",", "types", ".", "ModuleType", ")", "and", "attr_val", ".", "__name__", ".", "startswith", "(", "'fabsetup_'", ")", "and", "attr_name", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "!=", "package_name", ":", "submodule_name", "=", "flo", "(", "'{addon_module.__name__}.{attr_name}'", ")", "submodule", "=", "get_or_create_module_r", "(", "submodule_name", ")", "package_module", "=", "attr_val", "add_tasks_r", "(", "submodule", ",", "package_module", ",", "package_name", ")", "addon_module", ".", "__dict__", "[", "attr_name", "]", "=", "submodule" ]
Recursively iterate through 'package_module' and add every fabric task to the 'addon_module' keeping the task hierarchy. Args: addon_module(types.ModuleType) package_module(types.ModuleType) package_name(str): Required, to avoid redundant addition of tasks Return: None
[ "Recursively", "iterate", "through", "package_module", "and", "add", "every", "fabric", "task", "to", "the", "addon_module", "keeping", "the", "task", "hierarchy", "." ]
ced728abff93551ba5677e63bc1bdc0ef5ca5777
https://github.com/theno/fabsetup/blob/ced728abff93551ba5677e63bc1bdc0ef5ca5777/fabsetup/addons.py#L50-L77
train
theno/fabsetup
fabsetup/addons.py
load_addon
def load_addon(username, package_name, _globals):
    '''Load an fabsetup addon given by 'package_name' and hook it in the
    base task namespace 'username'.

    Args:
        username(str)
        package_name(str)
        _globals(dict): the globals() namespace of the fabric script.

    Return: None
    '''
    addon_module = get_or_create_module_r(username)
    package_module = __import__(package_name)
    add_tasks_r(addon_module, package_module, package_name)
    _globals.update({username: addon_module})
    del package_module
    del addon_module
python
def load_addon(username, package_name, _globals):
    '''Load an fabsetup addon given by 'package_name' and hook it in the
    base task namespace 'username'.

    Args:
        username(str)
        package_name(str)
        _globals(dict): the globals() namespace of the fabric script.

    Return: None
    '''
    addon_module = get_or_create_module_r(username)
    package_module = __import__(package_name)
    add_tasks_r(addon_module, package_module, package_name)
    _globals.update({username: addon_module})
    del package_module
    del addon_module
[ "def", "load_addon", "(", "username", ",", "package_name", ",", "_globals", ")", ":", "addon_module", "=", "get_or_create_module_r", "(", "username", ")", "package_module", "=", "__import__", "(", "package_name", ")", "add_tasks_r", "(", "addon_module", ",", "package_module", ",", "package_name", ")", "_globals", ".", "update", "(", "{", "username", ":", "addon_module", "}", ")", "del", "package_module", "del", "addon_module" ]
Load an fabsetup addon given by 'package_name' and hook it in the base task namespace 'username'. Args: username(str) package_name(str) _globals(dict): the globals() namespace of the fabric script. Return: None
[ "Load", "an", "fabsetup", "addon", "given", "by", "package_name", "and", "hook", "it", "in", "the", "base", "task", "namespace", "username", "." ]
ced728abff93551ba5677e63bc1bdc0ef5ca5777
https://github.com/theno/fabsetup/blob/ced728abff93551ba5677e63bc1bdc0ef5ca5777/fabsetup/addons.py#L80-L96
train
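A minimal fabfile sketch for this function; the addon package name and user namespace below are hypothetical placeholders, not taken from this record:

    # fabfile.py -- hook one fabsetup addon into the task namespace 'example_user'.
    # 'fabsetup_example_user_example_addon' is a hypothetical package name, for illustration only.
    from fabsetup.addons import load_addon

    load_addon('example_user', 'fabsetup_example_user_example_addon', globals())
    # The addon's fabric tasks now appear under the 'example_user' namespace of this fabfile.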
theno/fabsetup
fabsetup/addons.py
load_pip_addons
def load_pip_addons(_globals):
    '''Load all known fabsetup addons which are installed as pypi pip-packages.

    Args:
        _globals(dict): the globals() namespace of the fabric script.

    Return: None
    '''
    for package_name in known_pip_addons:
        _, username = package_username(package_name)
        try:
            load_addon(username, package_name.replace('-', '_'), _globals)
        except ImportError:
            pass
python
def load_pip_addons(_globals):
    '''Load all known fabsetup addons which are installed as pypi pip-packages.

    Args:
        _globals(dict): the globals() namespace of the fabric script.

    Return: None
    '''
    for package_name in known_pip_addons:
        _, username = package_username(package_name)
        try:
            load_addon(username, package_name.replace('-', '_'), _globals)
        except ImportError:
            pass
[ "def", "load_pip_addons", "(", "_globals", ")", ":", "for", "package_name", "in", "known_pip_addons", ":", "_", ",", "username", "=", "package_username", "(", "package_name", ")", "try", ":", "load_addon", "(", "username", ",", "package_name", ".", "replace", "(", "'-'", ",", "'_'", ")", ",", "_globals", ")", "except", "ImportError", ":", "pass" ]
Load all known fabsetup addons which are installed as pypi pip-packages. Args: _globals(dict): the globals() namespace of the fabric script. Return: None
[ "Load", "all", "known", "fabsetup", "addons", "which", "are", "installed", "as", "pypi", "pip", "-", "packages", "." ]
ced728abff93551ba5677e63bc1bdc0ef5ca5777
https://github.com/theno/fabsetup/blob/ced728abff93551ba5677e63bc1bdc0ef5ca5777/fabsetup/addons.py#L99-L112
train
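A companion sketch showing how a fabfile might pull in every known pip-installed addon at once; per the code above, packages that raise ImportError are silently skipped:

    # fabfile.py -- register all known fabsetup addons that happen to be pip-installed.
    from fabsetup.addons import load_pip_addons

    load_pip_addons(globals())  # addons that are not installed are simply skipped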
jaraco/jaraco.windows
jaraco/windows/lib.py
find_lib
def find_lib(lib): r""" Find the DLL for a given library. Accepts a string or loaded module >>> print(find_lib('kernel32').lower()) c:\windows\system32\kernel32.dll """ if isinstance(lib, str): lib = getattr(ctypes.windll, lib) size = 1024 result = ctypes.create_unicode_buffer(size) library.GetModuleFileName(lib._handle, result, size) return result.value
python
def find_lib(lib): r""" Find the DLL for a given library. Accepts a string or loaded module >>> print(find_lib('kernel32').lower()) c:\windows\system32\kernel32.dll """ if isinstance(lib, str): lib = getattr(ctypes.windll, lib) size = 1024 result = ctypes.create_unicode_buffer(size) library.GetModuleFileName(lib._handle, result, size) return result.value
[ "def", "find_lib", "(", "lib", ")", ":", "r", "if", "isinstance", "(", "lib", ",", "str", ")", ":", "lib", "=", "getattr", "(", "ctypes", ".", "windll", ",", "lib", ")", "size", "=", "1024", "result", "=", "ctypes", ".", "create_unicode_buffer", "(", "size", ")", "library", ".", "GetModuleFileName", "(", "lib", ".", "_handle", ",", "result", ",", "size", ")", "return", "result", ".", "value" ]
r""" Find the DLL for a given library. Accepts a string or loaded module >>> print(find_lib('kernel32').lower()) c:\windows\system32\kernel32.dll
[ "r", "Find", "the", "DLL", "for", "a", "given", "library", "." ]
51811efed50b46ad08daa25408a1cc806bc8d519
https://github.com/jaraco/jaraco.windows/blob/51811efed50b46ad08daa25408a1cc806bc8d519/jaraco/windows/lib.py#L6-L21
train
hydroshare/hs_restclient
hs_restclient/__init__.py
HydroShare.getScienceMetadataRDF
def getScienceMetadataRDF(self, pid): """ Get science metadata for a resource in XML+RDF format :param pid: The HydroShare ID of the resource :raises: HydroShareNotAuthorized if the user is not authorized to view the metadata. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException to signal an HTTP error. :return: A string representing the XML+RDF serialization of science metadata. Example of data XML+RDF returned: <?xml version="1.0"?> <!DOCTYPE rdf:RDF PUBLIC "-//DUBLIN CORE//DCMES DTD 2002/07/31//EN" "http://dublincore.org/documents/2002/07/31/dcmes-xml/dcmes-xml-dtd.dtd"> <rdf:RDF xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:hsterms="http://hydroshare.org/terms/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:rdfs1="http://www.w3.org/2001/01/rdf-schema#"> <rdf:Description rdf:about="http://www.hydroshare.org/resource/87ffb608900e407ab4b67d30c93b329e"> <dc:title>Great Salt Lake Level and Volume</dc:title> <dc:type rdf:resource="http://www.hydroshare.org/terms/GenericResource"/> <dc:description> <rdf:Description> <dcterms:abstract>Time series of level, area and volume in the Great Salt Lake. Volume and area of the Great Salt Lake are derived from recorded levels</dcterms:abstract> </rdf:Description> </dc:description> <hsterms:awardInfo> <rdf:Description rdf:about="http://www.nsf.gov"> <hsterms:fundingAgencyName>National Science Foundation</hsterms:fundingAgencyName> <hsterms:awardTitle>Model Execution Cyberinfrastructure </hsterms:awardTitle> <hsterms:awardNumber>NSF_9087658_2017</hsterms:awardNumber> </rdf:Description> </hsterms:awardInfo> <dc:creator> <rdf:Description> <hsterms:name>John Smith</hsterms:name> <hsterms:creatorOrder>1</hsterms:creatorOrder> <hsterms:organization>Utah State University</hsterms:organization> <hsterms:email>[email protected]</hsterms:email> <hsterms:address>Engineering Building, USU, Logan, Utah</hsterms:address> <hsterms:phone rdf:resource="tel:435-797-8967"/> </rdf:Description> </dc:creator> <dc:creator> <rdf:Description> <hsterms:name>Lisa Miller</hsterms:name> <hsterms:creatorOrder>2</hsterms:creatorOrder> </rdf:Description> </dc:creator> <dc:contributor> <rdf:Description> <hsterms:name>Jenny Parker</hsterms:name> <hsterms:organization>Univesity of Utah</hsterms:organization> <hsterms:email>[email protected]</hsterms:email> </rdf:Description> </dc:contributor> <dc:coverage> <dcterms:period> <rdf:value>start=2000-01-01T00:00:00; end=2010-12-12T00:00:00; scheme=W3C-DTF</rdf:value> </dcterms:period> </dc:coverage> <dc:date> <dcterms:created> <rdf:value>2017-01-03T17:06:18.932217+00:00</rdf:value> </dcterms:created> </dc:date> <dc:date> <dcterms:modified> <rdf:value>2017-01-03T17:35:34.067279+00:00</rdf:value> </dcterms:modified> </dc:date> <dc:format>image/tiff</dc:format> <dc:identifier> <rdf:Description> <hsterms:hydroShareIdentifier>http://www.hydroshare.org/resource/87ffb608900e407ab4b67d30c93b329e</hsterms:hydroShareIdentifier> </rdf:Description> </dc:identifier> <dc:language>eng</dc:language> <dc:rights> <rdf:Description> <hsterms:rightsStatement>This resource is shared under the Creative Commons Attribution CC BY.</hsterms:rightsStatement> <hsterms:URL rdf:resource="http://creativecommons.org/licenses/by/4.0/"/> </rdf:Description> </dc:rights> <dc:subject>NSF</dc:subject> <dc:subject>Model</dc:subject> <dc:subject>Cyberinfrastructure</dc:subject> <hsterms:extendedMetadata> <rdf:Description> <hsterms:key>model</hsterms:key> 
<hsterms:value>ueb</hsterms:value> </rdf:Description> </hsterms:extendedMetadata> <hsterms:extendedMetadata> <rdf:Description> <hsterms:key>os</hsterms:key> <hsterms:value>windows</hsterms:value> </rdf:Description> </hsterms:extendedMetadata> </rdf:Description> <rdf:Description rdf:about="http://www.hydroshare.org/terms/GenericResource"> <rdfs1:label>Generic</rdfs1:label> <rdfs1:isDefinedBy>http://www.hydroshare.org/terms</rdfs1:isDefinedBy> </rdf:Description> </rdf:RDF> """ url = "{url_base}/scimeta/{pid}/".format(url_base=self.url_base, pid=pid) r = self._request('GET', url) if r.status_code != 200: if r.status_code == 403: raise HydroShareNotAuthorized(('GET', url)) elif r.status_code == 404: raise HydroShareNotFound((pid,)) else: raise HydroShareHTTPException((url, 'GET', r.status_code)) return str(r.content)
python
def getScienceMetadataRDF(self, pid): """ Get science metadata for a resource in XML+RDF format :param pid: The HydroShare ID of the resource :raises: HydroShareNotAuthorized if the user is not authorized to view the metadata. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException to signal an HTTP error. :return: A string representing the XML+RDF serialization of science metadata. Example of data XML+RDF returned: <?xml version="1.0"?> <!DOCTYPE rdf:RDF PUBLIC "-//DUBLIN CORE//DCMES DTD 2002/07/31//EN" "http://dublincore.org/documents/2002/07/31/dcmes-xml/dcmes-xml-dtd.dtd"> <rdf:RDF xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:hsterms="http://hydroshare.org/terms/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:rdfs1="http://www.w3.org/2001/01/rdf-schema#"> <rdf:Description rdf:about="http://www.hydroshare.org/resource/87ffb608900e407ab4b67d30c93b329e"> <dc:title>Great Salt Lake Level and Volume</dc:title> <dc:type rdf:resource="http://www.hydroshare.org/terms/GenericResource"/> <dc:description> <rdf:Description> <dcterms:abstract>Time series of level, area and volume in the Great Salt Lake. Volume and area of the Great Salt Lake are derived from recorded levels</dcterms:abstract> </rdf:Description> </dc:description> <hsterms:awardInfo> <rdf:Description rdf:about="http://www.nsf.gov"> <hsterms:fundingAgencyName>National Science Foundation</hsterms:fundingAgencyName> <hsterms:awardTitle>Model Execution Cyberinfrastructure </hsterms:awardTitle> <hsterms:awardNumber>NSF_9087658_2017</hsterms:awardNumber> </rdf:Description> </hsterms:awardInfo> <dc:creator> <rdf:Description> <hsterms:name>John Smith</hsterms:name> <hsterms:creatorOrder>1</hsterms:creatorOrder> <hsterms:organization>Utah State University</hsterms:organization> <hsterms:email>[email protected]</hsterms:email> <hsterms:address>Engineering Building, USU, Logan, Utah</hsterms:address> <hsterms:phone rdf:resource="tel:435-797-8967"/> </rdf:Description> </dc:creator> <dc:creator> <rdf:Description> <hsterms:name>Lisa Miller</hsterms:name> <hsterms:creatorOrder>2</hsterms:creatorOrder> </rdf:Description> </dc:creator> <dc:contributor> <rdf:Description> <hsterms:name>Jenny Parker</hsterms:name> <hsterms:organization>Univesity of Utah</hsterms:organization> <hsterms:email>[email protected]</hsterms:email> </rdf:Description> </dc:contributor> <dc:coverage> <dcterms:period> <rdf:value>start=2000-01-01T00:00:00; end=2010-12-12T00:00:00; scheme=W3C-DTF</rdf:value> </dcterms:period> </dc:coverage> <dc:date> <dcterms:created> <rdf:value>2017-01-03T17:06:18.932217+00:00</rdf:value> </dcterms:created> </dc:date> <dc:date> <dcterms:modified> <rdf:value>2017-01-03T17:35:34.067279+00:00</rdf:value> </dcterms:modified> </dc:date> <dc:format>image/tiff</dc:format> <dc:identifier> <rdf:Description> <hsterms:hydroShareIdentifier>http://www.hydroshare.org/resource/87ffb608900e407ab4b67d30c93b329e</hsterms:hydroShareIdentifier> </rdf:Description> </dc:identifier> <dc:language>eng</dc:language> <dc:rights> <rdf:Description> <hsterms:rightsStatement>This resource is shared under the Creative Commons Attribution CC BY.</hsterms:rightsStatement> <hsterms:URL rdf:resource="http://creativecommons.org/licenses/by/4.0/"/> </rdf:Description> </dc:rights> <dc:subject>NSF</dc:subject> <dc:subject>Model</dc:subject> <dc:subject>Cyberinfrastructure</dc:subject> <hsterms:extendedMetadata> <rdf:Description> <hsterms:key>model</hsterms:key> 
<hsterms:value>ueb</hsterms:value> </rdf:Description> </hsterms:extendedMetadata> <hsterms:extendedMetadata> <rdf:Description> <hsterms:key>os</hsterms:key> <hsterms:value>windows</hsterms:value> </rdf:Description> </hsterms:extendedMetadata> </rdf:Description> <rdf:Description rdf:about="http://www.hydroshare.org/terms/GenericResource"> <rdfs1:label>Generic</rdfs1:label> <rdfs1:isDefinedBy>http://www.hydroshare.org/terms</rdfs1:isDefinedBy> </rdf:Description> </rdf:RDF> """ url = "{url_base}/scimeta/{pid}/".format(url_base=self.url_base, pid=pid) r = self._request('GET', url) if r.status_code != 200: if r.status_code == 403: raise HydroShareNotAuthorized(('GET', url)) elif r.status_code == 404: raise HydroShareNotFound((pid,)) else: raise HydroShareHTTPException((url, 'GET', r.status_code)) return str(r.content)
[ "def", "getScienceMetadataRDF", "(", "self", ",", "pid", ")", ":", "url", "=", "\"{url_base}/scimeta/{pid}/\"", ".", "format", "(", "url_base", "=", "self", ".", "url_base", ",", "pid", "=", "pid", ")", "r", "=", "self", ".", "_request", "(", "'GET'", ",", "url", ")", "if", "r", ".", "status_code", "!=", "200", ":", "if", "r", ".", "status_code", "==", "403", ":", "raise", "HydroShareNotAuthorized", "(", "(", "'GET'", ",", "url", ")", ")", "elif", "r", ".", "status_code", "==", "404", ":", "raise", "HydroShareNotFound", "(", "(", "pid", ",", ")", ")", "else", ":", "raise", "HydroShareHTTPException", "(", "(", "url", ",", "'GET'", ",", "r", ".", "status_code", ")", ")", "return", "str", "(", "r", ".", "content", ")" ]
Get science metadata for a resource in XML+RDF format :param pid: The HydroShare ID of the resource :raises: HydroShareNotAuthorized if the user is not authorized to view the metadata. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException to signal an HTTP error. :return: A string representing the XML+RDF serialization of science metadata. Example of data XML+RDF returned: <?xml version="1.0"?> <!DOCTYPE rdf:RDF PUBLIC "-//DUBLIN CORE//DCMES DTD 2002/07/31//EN" "http://dublincore.org/documents/2002/07/31/dcmes-xml/dcmes-xml-dtd.dtd"> <rdf:RDF xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:hsterms="http://hydroshare.org/terms/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:rdfs1="http://www.w3.org/2001/01/rdf-schema#"> <rdf:Description rdf:about="http://www.hydroshare.org/resource/87ffb608900e407ab4b67d30c93b329e"> <dc:title>Great Salt Lake Level and Volume</dc:title> <dc:type rdf:resource="http://www.hydroshare.org/terms/GenericResource"/> <dc:description> <rdf:Description> <dcterms:abstract>Time series of level, area and volume in the Great Salt Lake. Volume and area of the Great Salt Lake are derived from recorded levels</dcterms:abstract> </rdf:Description> </dc:description> <hsterms:awardInfo> <rdf:Description rdf:about="http://www.nsf.gov"> <hsterms:fundingAgencyName>National Science Foundation</hsterms:fundingAgencyName> <hsterms:awardTitle>Model Execution Cyberinfrastructure </hsterms:awardTitle> <hsterms:awardNumber>NSF_9087658_2017</hsterms:awardNumber> </rdf:Description> </hsterms:awardInfo> <dc:creator> <rdf:Description> <hsterms:name>John Smith</hsterms:name> <hsterms:creatorOrder>1</hsterms:creatorOrder> <hsterms:organization>Utah State University</hsterms:organization> <hsterms:email>[email protected]</hsterms:email> <hsterms:address>Engineering Building, USU, Logan, Utah</hsterms:address> <hsterms:phone rdf:resource="tel:435-797-8967"/> </rdf:Description> </dc:creator> <dc:creator> <rdf:Description> <hsterms:name>Lisa Miller</hsterms:name> <hsterms:creatorOrder>2</hsterms:creatorOrder> </rdf:Description> </dc:creator> <dc:contributor> <rdf:Description> <hsterms:name>Jenny Parker</hsterms:name> <hsterms:organization>Univesity of Utah</hsterms:organization> <hsterms:email>[email protected]</hsterms:email> </rdf:Description> </dc:contributor> <dc:coverage> <dcterms:period> <rdf:value>start=2000-01-01T00:00:00; end=2010-12-12T00:00:00; scheme=W3C-DTF</rdf:value> </dcterms:period> </dc:coverage> <dc:date> <dcterms:created> <rdf:value>2017-01-03T17:06:18.932217+00:00</rdf:value> </dcterms:created> </dc:date> <dc:date> <dcterms:modified> <rdf:value>2017-01-03T17:35:34.067279+00:00</rdf:value> </dcterms:modified> </dc:date> <dc:format>image/tiff</dc:format> <dc:identifier> <rdf:Description> <hsterms:hydroShareIdentifier>http://www.hydroshare.org/resource/87ffb608900e407ab4b67d30c93b329e</hsterms:hydroShareIdentifier> </rdf:Description> </dc:identifier> <dc:language>eng</dc:language> <dc:rights> <rdf:Description> <hsterms:rightsStatement>This resource is shared under the Creative Commons Attribution CC BY.</hsterms:rightsStatement> <hsterms:URL rdf:resource="http://creativecommons.org/licenses/by/4.0/"/> </rdf:Description> </dc:rights> <dc:subject>NSF</dc:subject> <dc:subject>Model</dc:subject> <dc:subject>Cyberinfrastructure</dc:subject> <hsterms:extendedMetadata> <rdf:Description> <hsterms:key>model</hsterms:key> <hsterms:value>ueb</hsterms:value> </rdf:Description> 
</hsterms:extendedMetadata> <hsterms:extendedMetadata> <rdf:Description> <hsterms:key>os</hsterms:key> <hsterms:value>windows</hsterms:value> </rdf:Description> </hsterms:extendedMetadata> </rdf:Description> <rdf:Description rdf:about="http://www.hydroshare.org/terms/GenericResource"> <rdfs1:label>Generic</rdfs1:label> <rdfs1:isDefinedBy>http://www.hydroshare.org/terms</rdfs1:isDefinedBy> </rdf:Description> </rdf:RDF>
[ "Get", "science", "metadata", "for", "a", "resource", "in", "XML", "+", "RDF", "format" ]
9cd106238b512e01ecd3e33425fe48c13b7f63d5
https://github.com/hydroshare/hs_restclient/blob/9cd106238b512e01ecd3e33425fe48c13b7f63d5/hs_restclient/__init__.py#L245-L358
train
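A hedged usage sketch for this endpoint; the credentials are placeholders and the resource id is the example id that appears in the docstring above:

    from hs_restclient import HydroShare, HydroShareAuthBasic

    # Placeholder credentials and resource id, for illustration only.
    hs = HydroShare(auth=HydroShareAuthBasic(username='myuser', password='mypassword'))
    pid = '87ffb608900e407ab4b67d30c93b329e'

    scimeta_xml = hs.getScienceMetadataRDF(pid)  # XML+RDF serialization as a string
    print(scimeta_xml[:200])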
hydroshare/hs_restclient
hs_restclient/__init__.py
HydroShare.getResource
def getResource(self, pid, destination=None, unzip=False, wait_for_bag_creation=True): """ Get a resource in BagIt format :param pid: The HydroShare ID of the resource :param destination: String representing the directory to save bag to. Bag will be saved to file named $(PID).zip in destination; existing file of the same name will be overwritten. If None, a stream to the zipped bag will be returned instead. :param unzip: True if the bag should be unzipped when saved to destination. Bag contents to be saved to directory named $(PID) residing in destination. Only applies when destination is not None. :param wait_for_bag_creation: True if to wait to download the bag in case the bag is not ready (bag needs to be recreated before it can be downloaded). :raises: HydroShareArgumentException if any arguments are invalid. :raises: HydroShareNotAuthorized if the user is not authorized to access the resource. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException to signal an HTTP error :raise: HydroShareBagNotReady if the bag is not ready to be downloaded and wait_for_bag_creation is False :return: None if the bag was saved directly to disk. Or a generator representing a buffered stream of the bytes comprising the bag returned by the REST end point. """ stream = self._getBagStream(pid, wait_for_bag_creation) if destination: self._storeBagOnFilesystem(stream, pid, destination, unzip) return None else: return stream
python
def getResource(self, pid, destination=None, unzip=False, wait_for_bag_creation=True): """ Get a resource in BagIt format :param pid: The HydroShare ID of the resource :param destination: String representing the directory to save bag to. Bag will be saved to file named $(PID).zip in destination; existing file of the same name will be overwritten. If None, a stream to the zipped bag will be returned instead. :param unzip: True if the bag should be unzipped when saved to destination. Bag contents to be saved to directory named $(PID) residing in destination. Only applies when destination is not None. :param wait_for_bag_creation: True if to wait to download the bag in case the bag is not ready (bag needs to be recreated before it can be downloaded). :raises: HydroShareArgumentException if any arguments are invalid. :raises: HydroShareNotAuthorized if the user is not authorized to access the resource. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException to signal an HTTP error :raise: HydroShareBagNotReady if the bag is not ready to be downloaded and wait_for_bag_creation is False :return: None if the bag was saved directly to disk. Or a generator representing a buffered stream of the bytes comprising the bag returned by the REST end point. """ stream = self._getBagStream(pid, wait_for_bag_creation) if destination: self._storeBagOnFilesystem(stream, pid, destination, unzip) return None else: return stream
[ "def", "getResource", "(", "self", ",", "pid", ",", "destination", "=", "None", ",", "unzip", "=", "False", ",", "wait_for_bag_creation", "=", "True", ")", ":", "stream", "=", "self", ".", "_getBagStream", "(", "pid", ",", "wait_for_bag_creation", ")", "if", "destination", ":", "self", ".", "_storeBagOnFilesystem", "(", "stream", ",", "pid", ",", "destination", ",", "unzip", ")", "return", "None", "else", ":", "return", "stream" ]
Get a resource in BagIt format :param pid: The HydroShare ID of the resource :param destination: String representing the directory to save bag to. Bag will be saved to file named $(PID).zip in destination; existing file of the same name will be overwritten. If None, a stream to the zipped bag will be returned instead. :param unzip: True if the bag should be unzipped when saved to destination. Bag contents to be saved to directory named $(PID) residing in destination. Only applies when destination is not None. :param wait_for_bag_creation: True if to wait to download the bag in case the bag is not ready (bag needs to be recreated before it can be downloaded). :raises: HydroShareArgumentException if any arguments are invalid. :raises: HydroShareNotAuthorized if the user is not authorized to access the resource. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException to signal an HTTP error :raise: HydroShareBagNotReady if the bag is not ready to be downloaded and wait_for_bag_creation is False :return: None if the bag was saved directly to disk. Or a generator representing a buffered stream of the bytes comprising the bag returned by the REST end point.
[ "Get", "a", "resource", "in", "BagIt", "format" ]
9cd106238b512e01ecd3e33425fe48c13b7f63d5
https://github.com/hydroshare/hs_restclient/blob/9cd106238b512e01ecd3e33425fe48c13b7f63d5/hs_restclient/__init__.py#L567-L594
train
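A hedged sketch of both ways this method can be used, per the docstring (save the bag to disk, or consume the byte stream); the client setup repeats the placeholder credentials from the previous sketch:

    from hs_restclient import HydroShare, HydroShareAuthBasic

    hs = HydroShare(auth=HydroShareAuthBasic(username='myuser', password='mypassword'))  # placeholders
    pid = '87ffb608900e407ab4b67d30c93b329e'  # hypothetical resource id

    # Save the BagIt archive to /tmp/<pid>.zip and unpack it into /tmp/<pid>/
    hs.getResource(pid, destination='/tmp', unzip=True)

    # Or stream the zipped bag and write it out manually:
    with open('bag.zip', 'wb') as fp:
        for chunk in hs.getResource(pid):
            fp.write(chunk)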
hydroshare/hs_restclient
hs_restclient/__init__.py
HydroShare.getResourceTypes
def getResourceTypes(self):
    """
    Get the list of resource types supported by the HydroShare server

    :return: A set of strings representing the HydroShare resource types

    :raises: HydroShareHTTPException to signal an HTTP error
    """
    url = "{url_base}/resource/types".format(url_base=self.url_base)

    r = self._request('GET', url)
    if r.status_code != 200:
        raise HydroShareHTTPException((url, 'GET', r.status_code))

    resource_types = r.json()
    return set([t['resource_type'] for t in resource_types])
python
def getResourceTypes(self):
    """
    Get the list of resource types supported by the HydroShare server

    :return: A set of strings representing the HydroShare resource types

    :raises: HydroShareHTTPException to signal an HTTP error
    """
    url = "{url_base}/resource/types".format(url_base=self.url_base)

    r = self._request('GET', url)
    if r.status_code != 200:
        raise HydroShareHTTPException((url, 'GET', r.status_code))

    resource_types = r.json()
    return set([t['resource_type'] for t in resource_types])
[ "def", "getResourceTypes", "(", "self", ")", ":", "url", "=", "\"{url_base}/resource/types\"", ".", "format", "(", "url_base", "=", "self", ".", "url_base", ")", "r", "=", "self", ".", "_request", "(", "'GET'", ",", "url", ")", "if", "r", ".", "status_code", "!=", "200", ":", "raise", "HydroShareHTTPException", "(", "(", "url", ",", "'GET'", ",", "r", ".", "status_code", ")", ")", "resource_types", "=", "r", ".", "json", "(", ")", "return", "set", "(", "[", "t", "[", "'resource_type'", "]", "for", "t", "in", "resource_types", "]", ")" ]
Get the list of resource types supported by the HydroShare server :return: A set of strings representing the HydroShare resource types :raises: HydroShareHTTPException to signal an HTTP error
[ "Get", "the", "list", "of", "resource", "types", "supported", "by", "the", "HydroShare", "server" ]
9cd106238b512e01ecd3e33425fe48c13b7f63d5
https://github.com/hydroshare/hs_restclient/blob/9cd106238b512e01ecd3e33425fe48c13b7f63d5/hs_restclient/__init__.py#L668-L682
train
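A short sketch (placeholder credentials) that checks whether the server knows a given resource type before trying to create one of that type:

    from hs_restclient import HydroShare, HydroShareAuthBasic

    hs = HydroShare(auth=HydroShareAuthBasic(username='myuser', password='mypassword'))  # placeholders
    types = hs.getResourceTypes()
    print('GenericResource' in types)  # e.g. True on servers that expose the generic type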
hydroshare/hs_restclient
hs_restclient/__init__.py
HydroShare.createResource
def createResource(self, resource_type, title, resource_file=None, resource_filename=None, abstract=None, keywords=None, edit_users=None, view_users=None, edit_groups=None, view_groups=None, metadata=None, extra_metadata=None, progress_callback=None): """ Create a new resource. :param resource_type: string representing the a HydroShare resource type recognized by this server. :param title: string representing the title of the new resource :param resource_file: a read-only binary file-like object (i.e. opened with the flag 'rb') or a string representing path to file to be uploaded as part of the new resource :param resource_filename: string representing the filename of the resource file. Must be specified if resource_file is a file-like object. If resource_file is a string representing a valid file path, and resource_filename is not specified, resource_filename will be equal to os.path.basename(resource_file). is a string :param abstract: string representing abstract of resource :param keywords: list of strings representing keywords to associate with the resource :param edit_users: list of HydroShare usernames who will be given edit permissions :param view_users: list of HydroShare usernames who will be given view permissions :param edit_groups: list of HydroShare group names that will be given edit permissions :param view_groups: list of HydroShare group names that will be given view permissions :param metadata: json string data for each of the metadata elements :param extra_metadata: json string data for key/value pair metadata elements defined by user :param progress_callback: user-defined function to provide feedback to the user about the progress of the upload of resource_file. For more information, see: http://toolbelt.readthedocs.org/en/latest/uploading-data.html#monitoring-your-streaming-multipart-upload :return: string representing ID of newly created resource. :raises: HydroShareArgumentException if any parameters are invalid. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered. 
""" url = "{url_base}/resource/".format(url_base=self.url_base) close_fd = False if resource_type not in self.resource_types: raise HydroShareArgumentException("Resource type {0} is not among known resources: {1}".format(resource_type, ", ".join([r for r in self.resource_types]))) # Prepare request params = {'resource_type': resource_type, 'title': title} if abstract: params['abstract'] = abstract if keywords: # Put keywords in a format that django-rest's serializer will understand for (i, kw) in enumerate(keywords): key = "keywords[{index}]".format(index=i) params[key] = kw if edit_users: params['edit_users'] = edit_users if view_users: params['view_users'] = view_users if edit_groups: params['edit_groups'] = edit_groups if view_groups: params['view_groups'] = view_groups if metadata: params['metadata'] = metadata if extra_metadata: params['extra_metadata'] = extra_metadata if resource_file: close_fd = self._prepareFileForUpload(params, resource_file, resource_filename) encoder = MultipartEncoder(params) if progress_callback is None: progress_callback = default_progress_callback monitor = MultipartEncoderMonitor(encoder, progress_callback) r = self._request('POST', url, data=monitor, headers={'Content-Type': monitor.content_type}) if close_fd: fd = params['file'][1] fd.close() if r.status_code != 201: if r.status_code == 403: raise HydroShareNotAuthorized(('POST', url)) else: raise HydroShareHTTPException((url, 'POST', r.status_code, params)) response = r.json() new_resource_id = response['resource_id'] return new_resource_id
python
def createResource(self, resource_type, title, resource_file=None, resource_filename=None, abstract=None, keywords=None, edit_users=None, view_users=None, edit_groups=None, view_groups=None, metadata=None, extra_metadata=None, progress_callback=None): """ Create a new resource. :param resource_type: string representing the a HydroShare resource type recognized by this server. :param title: string representing the title of the new resource :param resource_file: a read-only binary file-like object (i.e. opened with the flag 'rb') or a string representing path to file to be uploaded as part of the new resource :param resource_filename: string representing the filename of the resource file. Must be specified if resource_file is a file-like object. If resource_file is a string representing a valid file path, and resource_filename is not specified, resource_filename will be equal to os.path.basename(resource_file). is a string :param abstract: string representing abstract of resource :param keywords: list of strings representing keywords to associate with the resource :param edit_users: list of HydroShare usernames who will be given edit permissions :param view_users: list of HydroShare usernames who will be given view permissions :param edit_groups: list of HydroShare group names that will be given edit permissions :param view_groups: list of HydroShare group names that will be given view permissions :param metadata: json string data for each of the metadata elements :param extra_metadata: json string data for key/value pair metadata elements defined by user :param progress_callback: user-defined function to provide feedback to the user about the progress of the upload of resource_file. For more information, see: http://toolbelt.readthedocs.org/en/latest/uploading-data.html#monitoring-your-streaming-multipart-upload :return: string representing ID of newly created resource. :raises: HydroShareArgumentException if any parameters are invalid. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered. 
""" url = "{url_base}/resource/".format(url_base=self.url_base) close_fd = False if resource_type not in self.resource_types: raise HydroShareArgumentException("Resource type {0} is not among known resources: {1}".format(resource_type, ", ".join([r for r in self.resource_types]))) # Prepare request params = {'resource_type': resource_type, 'title': title} if abstract: params['abstract'] = abstract if keywords: # Put keywords in a format that django-rest's serializer will understand for (i, kw) in enumerate(keywords): key = "keywords[{index}]".format(index=i) params[key] = kw if edit_users: params['edit_users'] = edit_users if view_users: params['view_users'] = view_users if edit_groups: params['edit_groups'] = edit_groups if view_groups: params['view_groups'] = view_groups if metadata: params['metadata'] = metadata if extra_metadata: params['extra_metadata'] = extra_metadata if resource_file: close_fd = self._prepareFileForUpload(params, resource_file, resource_filename) encoder = MultipartEncoder(params) if progress_callback is None: progress_callback = default_progress_callback monitor = MultipartEncoderMonitor(encoder, progress_callback) r = self._request('POST', url, data=monitor, headers={'Content-Type': monitor.content_type}) if close_fd: fd = params['file'][1] fd.close() if r.status_code != 201: if r.status_code == 403: raise HydroShareNotAuthorized(('POST', url)) else: raise HydroShareHTTPException((url, 'POST', r.status_code, params)) response = r.json() new_resource_id = response['resource_id'] return new_resource_id
[ "def", "createResource", "(", "self", ",", "resource_type", ",", "title", ",", "resource_file", "=", "None", ",", "resource_filename", "=", "None", ",", "abstract", "=", "None", ",", "keywords", "=", "None", ",", "edit_users", "=", "None", ",", "view_users", "=", "None", ",", "edit_groups", "=", "None", ",", "view_groups", "=", "None", ",", "metadata", "=", "None", ",", "extra_metadata", "=", "None", ",", "progress_callback", "=", "None", ")", ":", "url", "=", "\"{url_base}/resource/\"", ".", "format", "(", "url_base", "=", "self", ".", "url_base", ")", "close_fd", "=", "False", "if", "resource_type", "not", "in", "self", ".", "resource_types", ":", "raise", "HydroShareArgumentException", "(", "\"Resource type {0} is not among known resources: {1}\"", ".", "format", "(", "resource_type", ",", "\", \"", ".", "join", "(", "[", "r", "for", "r", "in", "self", ".", "resource_types", "]", ")", ")", ")", "params", "=", "{", "'resource_type'", ":", "resource_type", ",", "'title'", ":", "title", "}", "if", "abstract", ":", "params", "[", "'abstract'", "]", "=", "abstract", "if", "keywords", ":", "for", "(", "i", ",", "kw", ")", "in", "enumerate", "(", "keywords", ")", ":", "key", "=", "\"keywords[{index}]\"", ".", "format", "(", "index", "=", "i", ")", "params", "[", "key", "]", "=", "kw", "if", "edit_users", ":", "params", "[", "'edit_users'", "]", "=", "edit_users", "if", "view_users", ":", "params", "[", "'view_users'", "]", "=", "view_users", "if", "edit_groups", ":", "params", "[", "'edit_groups'", "]", "=", "edit_groups", "if", "view_groups", ":", "params", "[", "'view_groups'", "]", "=", "view_groups", "if", "metadata", ":", "params", "[", "'metadata'", "]", "=", "metadata", "if", "extra_metadata", ":", "params", "[", "'extra_metadata'", "]", "=", "extra_metadata", "if", "resource_file", ":", "close_fd", "=", "self", ".", "_prepareFileForUpload", "(", "params", ",", "resource_file", ",", "resource_filename", ")", "encoder", "=", "MultipartEncoder", "(", "params", ")", "if", "progress_callback", "is", "None", ":", "progress_callback", "=", "default_progress_callback", "monitor", "=", "MultipartEncoderMonitor", "(", "encoder", ",", "progress_callback", ")", "r", "=", "self", ".", "_request", "(", "'POST'", ",", "url", ",", "data", "=", "monitor", ",", "headers", "=", "{", "'Content-Type'", ":", "monitor", ".", "content_type", "}", ")", "if", "close_fd", ":", "fd", "=", "params", "[", "'file'", "]", "[", "1", "]", "fd", ".", "close", "(", ")", "if", "r", ".", "status_code", "!=", "201", ":", "if", "r", ".", "status_code", "==", "403", ":", "raise", "HydroShareNotAuthorized", "(", "(", "'POST'", ",", "url", ")", ")", "else", ":", "raise", "HydroShareHTTPException", "(", "(", "url", ",", "'POST'", ",", "r", ".", "status_code", ",", "params", ")", ")", "response", "=", "r", ".", "json", "(", ")", "new_resource_id", "=", "response", "[", "'resource_id'", "]", "return", "new_resource_id" ]
Create a new resource. :param resource_type: string representing the a HydroShare resource type recognized by this server. :param title: string representing the title of the new resource :param resource_file: a read-only binary file-like object (i.e. opened with the flag 'rb') or a string representing path to file to be uploaded as part of the new resource :param resource_filename: string representing the filename of the resource file. Must be specified if resource_file is a file-like object. If resource_file is a string representing a valid file path, and resource_filename is not specified, resource_filename will be equal to os.path.basename(resource_file). is a string :param abstract: string representing abstract of resource :param keywords: list of strings representing keywords to associate with the resource :param edit_users: list of HydroShare usernames who will be given edit permissions :param view_users: list of HydroShare usernames who will be given view permissions :param edit_groups: list of HydroShare group names that will be given edit permissions :param view_groups: list of HydroShare group names that will be given view permissions :param metadata: json string data for each of the metadata elements :param extra_metadata: json string data for key/value pair metadata elements defined by user :param progress_callback: user-defined function to provide feedback to the user about the progress of the upload of resource_file. For more information, see: http://toolbelt.readthedocs.org/en/latest/uploading-data.html#monitoring-your-streaming-multipart-upload :return: string representing ID of newly created resource. :raises: HydroShareArgumentException if any parameters are invalid. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered.
[ "Create", "a", "new", "resource", "." ]
9cd106238b512e01ecd3e33425fe48c13b7f63d5
https://github.com/hydroshare/hs_restclient/blob/9cd106238b512e01ecd3e33425fe48c13b7f63d5/hs_restclient/__init__.py#L684-L773
train
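A hedged creation sketch; every value below is a placeholder, and the resource type should be one of the strings returned by getResourceTypes():

    from hs_restclient import HydroShare, HydroShareAuthBasic

    hs = HydroShare(auth=HydroShareAuthBasic(username='myuser', password='mypassword'))  # placeholders
    resource_id = hs.createResource(
        'GenericResource',
        'My hypothetical resource title',
        resource_file='/path/to/local/file.txt',   # placeholder path
        keywords=['example', 'sketch'],
        abstract='Placeholder abstract text.',
    )
    print(resource_id)  # HydroShare ID of the newly created resource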
hydroshare/hs_restclient
hs_restclient/__init__.py
HydroShare.setAccessRules
def setAccessRules(self, pid, public=False):
    """
    Set access rules for a resource.  Current only allows for setting the public or private setting.

    :param pid: The HydroShare ID of the resource
    :param public: True if the resource should be made public.
    """
    url = "{url_base}/resource/accessRules/{pid}/".format(url_base=self.url_base,
                                                          pid=pid)
    params = {'public': public}

    r = self._request('PUT', url, data=params)
    if r.status_code != 200:
        if r.status_code == 403:
            raise HydroShareNotAuthorized(('PUT', url))
        elif r.status_code == 404:
            raise HydroShareNotFound((pid,))
        else:
            raise HydroShareHTTPException((url, 'PUT', r.status_code, params))

    resource = r.json()
    assert(resource['resource_id'] == pid)
    return resource['resource_id']
python
def setAccessRules(self, pid, public=False):
    """
    Set access rules for a resource.  Current only allows for setting the public or private setting.

    :param pid: The HydroShare ID of the resource
    :param public: True if the resource should be made public.
    """
    url = "{url_base}/resource/accessRules/{pid}/".format(url_base=self.url_base,
                                                          pid=pid)
    params = {'public': public}

    r = self._request('PUT', url, data=params)
    if r.status_code != 200:
        if r.status_code == 403:
            raise HydroShareNotAuthorized(('PUT', url))
        elif r.status_code == 404:
            raise HydroShareNotFound((pid,))
        else:
            raise HydroShareHTTPException((url, 'PUT', r.status_code, params))

    resource = r.json()
    assert(resource['resource_id'] == pid)
    return resource['resource_id']
[ "def", "setAccessRules", "(", "self", ",", "pid", ",", "public", "=", "False", ")", ":", "url", "=", "\"{url_base}/resource/accessRules/{pid}/\"", ".", "format", "(", "url_base", "=", "self", ".", "url_base", ",", "pid", "=", "pid", ")", "params", "=", "{", "'public'", ":", "public", "}", "r", "=", "self", ".", "_request", "(", "'PUT'", ",", "url", ",", "data", "=", "params", ")", "if", "r", ".", "status_code", "!=", "200", ":", "if", "r", ".", "status_code", "==", "403", ":", "raise", "HydroShareNotAuthorized", "(", "(", "'PUT'", ",", "url", ")", ")", "elif", "r", ".", "status_code", "==", "404", ":", "raise", "HydroShareNotFound", "(", "(", "pid", ",", ")", ")", "else", ":", "raise", "HydroShareHTTPException", "(", "(", "url", ",", "'PUT'", ",", "r", ".", "status_code", ",", "params", ")", ")", "resource", "=", "r", ".", "json", "(", ")", "assert", "(", "resource", "[", "'resource_id'", "]", "==", "pid", ")", "return", "resource", "[", "'resource_id'", "]" ]
Set access rules for a resource. Current only allows for setting the public or private setting. :param pid: The HydroShare ID of the resource :param public: True if the resource should be made public.
[ "Set", "access", "rules", "for", "a", "resource", ".", "Current", "only", "allows", "for", "setting", "the", "public", "or", "private", "setting", "." ]
9cd106238b512e01ecd3e33425fe48c13b7f63d5
https://github.com/hydroshare/hs_restclient/blob/9cd106238b512e01ecd3e33425fe48c13b7f63d5/hs_restclient/__init__.py#L797-L819
train
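A one-call sketch (placeholder client and id) that flips a resource to public, which per the docstring is the only rule this method currently sets:

    from hs_restclient import HydroShare, HydroShareAuthBasic

    hs = HydroShare(auth=HydroShareAuthBasic(username='myuser', password='mypassword'))  # placeholders
    pid = '87ffb608900e407ab4b67d30c93b329e'  # hypothetical resource id
    hs.setAccessRules(pid, public=True)  # returns the same resource id on success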
hydroshare/hs_restclient
hs_restclient/__init__.py
HydroShare.addResourceFile
def addResourceFile(self, pid, resource_file, resource_filename=None, progress_callback=None): """ Add a new file to an existing resource :param pid: The HydroShare ID of the resource :param resource_file: a read-only binary file-like object (i.e. opened with the flag 'rb') or a string representing path to file to be uploaded as part of the new resource :param resource_filename: string representing the filename of the resource file. Must be specified if resource_file is a file-like object. If resource_file is a string representing a valid file path, and resource_filename is not specified, resource_filename will be equal to os.path.basename(resource_file). is a string :param progress_callback: user-defined function to provide feedback to the user about the progress of the upload of resource_file. For more information, see: http://toolbelt.readthedocs.org/en/latest/uploading-data.html#monitoring-your-streaming-multipart-upload :return: Dictionary containing 'resource_id' the ID of the resource to which the file was added, and 'file_name' the filename of the file added. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered. """ url = "{url_base}/resource/{pid}/files/".format(url_base=self.url_base, pid=pid) params = {} close_fd = self._prepareFileForUpload(params, resource_file, resource_filename) encoder = MultipartEncoder(params) if progress_callback is None: progress_callback = default_progress_callback monitor = MultipartEncoderMonitor(encoder, progress_callback) r = self._request('POST', url, data=monitor, headers={'Content-Type': monitor.content_type}) if close_fd: fd = params['file'][1] fd.close() if r.status_code != 201: if r.status_code == 403: raise HydroShareNotAuthorized(('POST', url)) elif r.status_code == 404: raise HydroShareNotFound((pid,)) else: raise HydroShareHTTPException((url, 'POST', r.status_code)) response = r.json() # assert(response['resource_id'] == pid) return response
python
def addResourceFile(self, pid, resource_file, resource_filename=None, progress_callback=None): """ Add a new file to an existing resource :param pid: The HydroShare ID of the resource :param resource_file: a read-only binary file-like object (i.e. opened with the flag 'rb') or a string representing path to file to be uploaded as part of the new resource :param resource_filename: string representing the filename of the resource file. Must be specified if resource_file is a file-like object. If resource_file is a string representing a valid file path, and resource_filename is not specified, resource_filename will be equal to os.path.basename(resource_file). is a string :param progress_callback: user-defined function to provide feedback to the user about the progress of the upload of resource_file. For more information, see: http://toolbelt.readthedocs.org/en/latest/uploading-data.html#monitoring-your-streaming-multipart-upload :return: Dictionary containing 'resource_id' the ID of the resource to which the file was added, and 'file_name' the filename of the file added. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered. """ url = "{url_base}/resource/{pid}/files/".format(url_base=self.url_base, pid=pid) params = {} close_fd = self._prepareFileForUpload(params, resource_file, resource_filename) encoder = MultipartEncoder(params) if progress_callback is None: progress_callback = default_progress_callback monitor = MultipartEncoderMonitor(encoder, progress_callback) r = self._request('POST', url, data=monitor, headers={'Content-Type': monitor.content_type}) if close_fd: fd = params['file'][1] fd.close() if r.status_code != 201: if r.status_code == 403: raise HydroShareNotAuthorized(('POST', url)) elif r.status_code == 404: raise HydroShareNotFound((pid,)) else: raise HydroShareHTTPException((url, 'POST', r.status_code)) response = r.json() # assert(response['resource_id'] == pid) return response
[ "def", "addResourceFile", "(", "self", ",", "pid", ",", "resource_file", ",", "resource_filename", "=", "None", ",", "progress_callback", "=", "None", ")", ":", "url", "=", "\"{url_base}/resource/{pid}/files/\"", ".", "format", "(", "url_base", "=", "self", ".", "url_base", ",", "pid", "=", "pid", ")", "params", "=", "{", "}", "close_fd", "=", "self", ".", "_prepareFileForUpload", "(", "params", ",", "resource_file", ",", "resource_filename", ")", "encoder", "=", "MultipartEncoder", "(", "params", ")", "if", "progress_callback", "is", "None", ":", "progress_callback", "=", "default_progress_callback", "monitor", "=", "MultipartEncoderMonitor", "(", "encoder", ",", "progress_callback", ")", "r", "=", "self", ".", "_request", "(", "'POST'", ",", "url", ",", "data", "=", "monitor", ",", "headers", "=", "{", "'Content-Type'", ":", "monitor", ".", "content_type", "}", ")", "if", "close_fd", ":", "fd", "=", "params", "[", "'file'", "]", "[", "1", "]", "fd", ".", "close", "(", ")", "if", "r", ".", "status_code", "!=", "201", ":", "if", "r", ".", "status_code", "==", "403", ":", "raise", "HydroShareNotAuthorized", "(", "(", "'POST'", ",", "url", ")", ")", "elif", "r", ".", "status_code", "==", "404", ":", "raise", "HydroShareNotFound", "(", "(", "pid", ",", ")", ")", "else", ":", "raise", "HydroShareHTTPException", "(", "(", "url", ",", "'POST'", ",", "r", ".", "status_code", ")", ")", "response", "=", "r", ".", "json", "(", ")", "return", "response" ]
Add a new file to an existing resource :param pid: The HydroShare ID of the resource :param resource_file: a read-only binary file-like object (i.e. opened with the flag 'rb') or a string representing path to file to be uploaded as part of the new resource :param resource_filename: string representing the filename of the resource file. Must be specified if resource_file is a file-like object. If resource_file is a string representing a valid file path, and resource_filename is not specified, resource_filename will be equal to os.path.basename(resource_file). is a string :param progress_callback: user-defined function to provide feedback to the user about the progress of the upload of resource_file. For more information, see: http://toolbelt.readthedocs.org/en/latest/uploading-data.html#monitoring-your-streaming-multipart-upload :return: Dictionary containing 'resource_id' the ID of the resource to which the file was added, and 'file_name' the filename of the file added. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered.
[ "Add", "a", "new", "file", "to", "an", "existing", "resource" ]
9cd106238b512e01ecd3e33425fe48c13b7f63d5
https://github.com/hydroshare/hs_restclient/blob/9cd106238b512e01ecd3e33425fe48c13b7f63d5/hs_restclient/__init__.py#L821-L870
train
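A hedged sketch covering both accepted forms of resource_file described in the docstring: a path string, or an open binary file object plus an explicit resource_filename (all names are placeholders):

    from hs_restclient import HydroShare, HydroShareAuthBasic

    hs = HydroShare(auth=HydroShareAuthBasic(username='myuser', password='mypassword'))  # placeholders
    pid = '87ffb608900e407ab4b67d30c93b329e'  # hypothetical resource id

    # Path form: the filename is inferred from the path.
    hs.addResourceFile(pid, '/path/to/local/data.csv')

    # File-object form: resource_filename must be given explicitly.
    with open('/path/to/local/data.csv', 'rb') as fd:
        hs.addResourceFile(pid, fd, resource_filename='data.csv')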
hydroshare/hs_restclient
hs_restclient/__init__.py
HydroShare.getResourceFile
def getResourceFile(self, pid, filename, destination=None): """ Get a file within a resource. :param pid: The HydroShare ID of the resource :param filename: String representing the name of the resource file to get. :param destination: String representing the directory to save the resource file to. If None, a stream to the resource file will be returned instead. :return: The path of the downloaded file (if destination was specified), or a stream to the resource file. :raises: HydroShareArgumentException if any parameters are invalid. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered. """ url = "{url_base}/resource/{pid}/files/{filename}".format(url_base=self.url_base, pid=pid, filename=filename) if destination: if not os.path.isdir(destination): raise HydroShareArgumentException("{0} is not a directory.".format(destination)) if not os.access(destination, os.W_OK): raise HydroShareArgumentException("You do not have write permissions to directory '{0}'.".format(destination)) r = self._request('GET', url, stream=True) if r.status_code != 200: if r.status_code == 403: raise HydroShareNotAuthorized(('GET', url)) elif r.status_code == 404: raise HydroShareNotFound((pid, filename)) else: raise HydroShareHTTPException((url, 'GET', r.status_code)) if destination is None: return r.iter_content(STREAM_CHUNK_SIZE) else: filepath = os.path.join(destination, filename) with open(filepath, 'wb') as fd: for chunk in r.iter_content(STREAM_CHUNK_SIZE): fd.write(chunk) return filepath
python
def getResourceFile(self, pid, filename, destination=None): """ Get a file within a resource. :param pid: The HydroShare ID of the resource :param filename: String representing the name of the resource file to get. :param destination: String representing the directory to save the resource file to. If None, a stream to the resource file will be returned instead. :return: The path of the downloaded file (if destination was specified), or a stream to the resource file. :raises: HydroShareArgumentException if any parameters are invalid. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered. """ url = "{url_base}/resource/{pid}/files/{filename}".format(url_base=self.url_base, pid=pid, filename=filename) if destination: if not os.path.isdir(destination): raise HydroShareArgumentException("{0} is not a directory.".format(destination)) if not os.access(destination, os.W_OK): raise HydroShareArgumentException("You do not have write permissions to directory '{0}'.".format(destination)) r = self._request('GET', url, stream=True) if r.status_code != 200: if r.status_code == 403: raise HydroShareNotAuthorized(('GET', url)) elif r.status_code == 404: raise HydroShareNotFound((pid, filename)) else: raise HydroShareHTTPException((url, 'GET', r.status_code)) if destination is None: return r.iter_content(STREAM_CHUNK_SIZE) else: filepath = os.path.join(destination, filename) with open(filepath, 'wb') as fd: for chunk in r.iter_content(STREAM_CHUNK_SIZE): fd.write(chunk) return filepath
[ "def", "getResourceFile", "(", "self", ",", "pid", ",", "filename", ",", "destination", "=", "None", ")", ":", "url", "=", "\"{url_base}/resource/{pid}/files/{filename}\"", ".", "format", "(", "url_base", "=", "self", ".", "url_base", ",", "pid", "=", "pid", ",", "filename", "=", "filename", ")", "if", "destination", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "destination", ")", ":", "raise", "HydroShareArgumentException", "(", "\"{0} is not a directory.\"", ".", "format", "(", "destination", ")", ")", "if", "not", "os", ".", "access", "(", "destination", ",", "os", ".", "W_OK", ")", ":", "raise", "HydroShareArgumentException", "(", "\"You do not have write permissions to directory '{0}'.\"", ".", "format", "(", "destination", ")", ")", "r", "=", "self", ".", "_request", "(", "'GET'", ",", "url", ",", "stream", "=", "True", ")", "if", "r", ".", "status_code", "!=", "200", ":", "if", "r", ".", "status_code", "==", "403", ":", "raise", "HydroShareNotAuthorized", "(", "(", "'GET'", ",", "url", ")", ")", "elif", "r", ".", "status_code", "==", "404", ":", "raise", "HydroShareNotFound", "(", "(", "pid", ",", "filename", ")", ")", "else", ":", "raise", "HydroShareHTTPException", "(", "(", "url", ",", "'GET'", ",", "r", ".", "status_code", ")", ")", "if", "destination", "is", "None", ":", "return", "r", ".", "iter_content", "(", "STREAM_CHUNK_SIZE", ")", "else", ":", "filepath", "=", "os", ".", "path", ".", "join", "(", "destination", ",", "filename", ")", "with", "open", "(", "filepath", ",", "'wb'", ")", "as", "fd", ":", "for", "chunk", "in", "r", ".", "iter_content", "(", "STREAM_CHUNK_SIZE", ")", ":", "fd", ".", "write", "(", "chunk", ")", "return", "filepath" ]
Get a file within a resource. :param pid: The HydroShare ID of the resource :param filename: String representing the name of the resource file to get. :param destination: String representing the directory to save the resource file to. If None, a stream to the resource file will be returned instead. :return: The path of the downloaded file (if destination was specified), or a stream to the resource file. :raises: HydroShareArgumentException if any parameters are invalid. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered.
[ "Get", "a", "file", "within", "a", "resource", "." ]
9cd106238b512e01ecd3e33425fe48c13b7f63d5
https://github.com/hydroshare/hs_restclient/blob/9cd106238b512e01ecd3e33425fe48c13b7f63d5/hs_restclient/__init__.py#L872-L913
train
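A hedged download sketch showing both return modes described in the docstring (saved file path vs. byte stream); names are placeholders:

    from hs_restclient import HydroShare, HydroShareAuthBasic

    hs = HydroShare(auth=HydroShareAuthBasic(username='myuser', password='mypassword'))  # placeholders
    pid = '87ffb608900e407ab4b67d30c93b329e'  # hypothetical resource id

    # Save to a directory and get back the local path:
    local_path = hs.getResourceFile(pid, 'data.csv', destination='/tmp')

    # Or consume the stream directly:
    content = b''.join(hs.getResourceFile(pid, 'data.csv'))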
hydroshare/hs_restclient
hs_restclient/__init__.py
HydroShare.deleteResourceFile
def deleteResourceFile(self, pid, filename):
    """
    Delete a resource file

    :param pid: The HydroShare ID of the resource
    :param filename: String representing the name of the resource file to delete

    :return: Dictionary containing 'resource_id' the ID of the resource from which the file was deleted,
        and 'file_name' the filename of the file deleted.

    :raises: HydroShareNotAuthorized if user is not authorized to perform action.
    :raises: HydroShareNotFound if the resource or resource file was not found.
    :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered.
    """
    url = "{url_base}/resource/{pid}/files/{filename}".format(url_base=self.url_base,
                                                              pid=pid,
                                                              filename=filename)
    r = self._request('DELETE', url)
    if r.status_code != 200:
        if r.status_code == 403:
            raise HydroShareNotAuthorized(('DELETE', url))
        elif r.status_code == 404:
            raise HydroShareNotFound((pid, filename))
        else:
            raise HydroShareHTTPException((url, 'DELETE', r.status_code))

    response = r.json()
    assert(response['resource_id'] == pid)
    return response['resource_id']
python
def deleteResourceFile(self, pid, filename): """ Delete a resource file :param pid: The HydroShare ID of the resource :param filename: String representing the name of the resource file to delete :return: Dictionary containing 'resource_id' the ID of the resource from which the file was deleted, and 'file_name' the filename of the file deleted. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareNotFound if the resource or resource file was not found. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered. """ url = "{url_base}/resource/{pid}/files/{filename}".format(url_base=self.url_base, pid=pid, filename=filename) r = self._request('DELETE', url) if r.status_code != 200: if r.status_code == 403: raise HydroShareNotAuthorized(('DELETE', url)) elif r.status_code == 404: raise HydroShareNotFound((pid, filename)) else: raise HydroShareHTTPException((url, 'DELETE', r.status_code)) response = r.json() assert(response['resource_id'] == pid) return response['resource_id']
[ "def", "deleteResourceFile", "(", "self", ",", "pid", ",", "filename", ")", ":", "url", "=", "\"{url_base}/resource/{pid}/files/{filename}\"", ".", "format", "(", "url_base", "=", "self", ".", "url_base", ",", "pid", "=", "pid", ",", "filename", "=", "filename", ")", "r", "=", "self", ".", "_request", "(", "'DELETE'", ",", "url", ")", "if", "r", ".", "status_code", "!=", "200", ":", "if", "r", ".", "status_code", "==", "403", ":", "raise", "HydroShareNotAuthorized", "(", "(", "'DELETE'", ",", "url", ")", ")", "elif", "r", ".", "status_code", "==", "404", ":", "raise", "HydroShareNotFound", "(", "(", "pid", ",", "filename", ")", ")", "else", ":", "raise", "HydroShareHTTPException", "(", "(", "url", ",", "'DELETE'", ",", "r", ".", "status_code", ")", ")", "response", "=", "r", ".", "json", "(", ")", "assert", "(", "response", "[", "'resource_id'", "]", "==", "pid", ")", "return", "response", "[", "'resource_id'", "]" ]
Delete a resource file :param pid: The HydroShare ID of the resource :param filename: String representing the name of the resource file to delete :return: Dictionary containing 'resource_id' the ID of the resource from which the file was deleted, and 'file_name' the filename of the file deleted. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareNotFound if the resource or resource file was not found. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered.
[ "Delete", "a", "resource", "file" ]
9cd106238b512e01ecd3e33425fe48c13b7f63d5
https://github.com/hydroshare/hs_restclient/blob/9cd106238b512e01ecd3e33425fe48c13b7f63d5/hs_restclient/__init__.py#L915-L944
train
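A hedged usage sketch for deleteResourceFile; `hs` is again assumed to be an authenticated client, and the import assumes the exception class referenced above is exposed by the hs_restclient package. Note that the body above returns response['resource_id'] (a string) even though its docstring describes a dictionary, so the sketch treats the return value opaquely:

from hs_restclient import HydroShareNotFound  # assumed importable from the package

def delete_if_present(hs, pid, filename):
    # Treat "file already gone" as success; other errors propagate unchanged.
    try:
        return hs.deleteResourceFile(pid, filename)
    except HydroShareNotFound:
        return None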
hydroshare/hs_restclient
hs_restclient/__init__.py
HydroShare.getResourceFileList
def getResourceFileList(self, pid): """ Get a listing of files within a resource. :param pid: The HydroShare ID of the resource whose resource files are to be listed. :raises: HydroShareArgumentException if any parameters are invalid. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered. :return: A generator that can be used to fetch dict objects, each dict representing the JSON object representation of the resource returned by the REST end point. For example: { "count": 95, "next": "https://www.hydroshare.org/hsapi/resource/32a08bc23a86e471282a832143491b49/file_list/?page=2", "previous": null, "results": [ { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/foo/bar.txt", "size": 23550, "content_type": "text/plain" }, { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/dem.tif", "size": 107545, "content_type": "image/tiff" }, { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/data.csv", "size": 148, "content_type": "text/csv" }, { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/data.sqlite", "size": 267118, "content_type": "application/x-sqlite3" }, { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/viz.png", "size": 128, "content_type": "image/png" } ] } """ url = "{url_base}/resource/{pid}/files/".format(url_base=self.url_base, pid=pid) return resultsListGenerator(self, url)
python
def getResourceFileList(self, pid): """ Get a listing of files within a resource. :param pid: The HydroShare ID of the resource whose resource files are to be listed. :raises: HydroShareArgumentException if any parameters are invalid. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered. :return: A generator that can be used to fetch dict objects, each dict representing the JSON object representation of the resource returned by the REST end point. For example: { "count": 95, "next": "https://www.hydroshare.org/hsapi/resource/32a08bc23a86e471282a832143491b49/file_list/?page=2", "previous": null, "results": [ { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/foo/bar.txt", "size": 23550, "content_type": "text/plain" }, { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/dem.tif", "size": 107545, "content_type": "image/tiff" }, { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/data.csv", "size": 148, "content_type": "text/csv" }, { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/data.sqlite", "size": 267118, "content_type": "application/x-sqlite3" }, { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/viz.png", "size": 128, "content_type": "image/png" } ] } """ url = "{url_base}/resource/{pid}/files/".format(url_base=self.url_base, pid=pid) return resultsListGenerator(self, url)
[ "def", "getResourceFileList", "(", "self", ",", "pid", ")", ":", "url", "=", "\"{url_base}/resource/{pid}/files/\"", ".", "format", "(", "url_base", "=", "self", ".", "url_base", ",", "pid", "=", "pid", ")", "return", "resultsListGenerator", "(", "self", ",", "url", ")" ]
Get a listing of files within a resource. :param pid: The HydroShare ID of the resource whose resource files are to be listed. :raises: HydroShareArgumentException if any parameters are invalid. :raises: HydroShareNotAuthorized if user is not authorized to perform action. :raises: HydroShareNotFound if the resource was not found. :raises: HydroShareHTTPException if an unexpected HTTP response code is encountered. :return: A generator that can be used to fetch dict objects, each dict representing the JSON object representation of the resource returned by the REST end point. For example: { "count": 95, "next": "https://www.hydroshare.org/hsapi/resource/32a08bc23a86e471282a832143491b49/file_list/?page=2", "previous": null, "results": [ { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/foo/bar.txt", "size": 23550, "content_type": "text/plain" }, { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/dem.tif", "size": 107545, "content_type": "image/tiff" }, { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/data.csv", "size": 148, "content_type": "text/csv" }, { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/data.sqlite", "size": 267118, "content_type": "application/x-sqlite3" }, { "url": "http://www.hydroshare.org/django_irods/download/32a08bc23a86e471282a832143491b49/data/contents/viz.png", "size": 128, "content_type": "image/png" } ] }
[ "Get", "a", "listing", "of", "files", "within", "a", "resource", "." ]
9cd106238b512e01ecd3e33425fe48c13b7f63d5
https://github.com/hydroshare/hs_restclient/blob/9cd106238b512e01ecd3e33425fe48c13b7f63d5/hs_restclient/__init__.py#L946-L994
train
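A small sketch that consumes the generator returned by getResourceFileList; the 'url' and 'size' keys are taken from the example payload in the docstring above, and `hs`/`pid` are placeholders:

def list_large_files(hs, pid, threshold_bytes=1_000_000):
    # The generator transparently follows the paginated "next" links,
    # so the caller only has to iterate.
    big = []
    for entry in hs.getResourceFileList(pid):
        if entry.get('size', 0) > threshold_bytes:
            big.append((entry['url'], entry['size']))
    return big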
muckamuck/stackility
stackility/utility/get_ssm_parameter.py
get_ssm_parameter
def get_ssm_parameter(parameter_name): ''' Get the decrypted value of an SSM parameter Args: parameter_name - the name of the stored parameter of interest Return: Value if allowed and present else None ''' try: response = boto3.client('ssm').get_parameters( Names=[parameter_name], WithDecryption=True ) return response.get('Parameters', None)[0].get('Value', '') except Exception: pass return ''
python
def get_ssm_parameter(parameter_name): ''' Get the decrypted value of an SSM parameter Args: parameter_name - the name of the stored parameter of interest Return: Value if allowed and present else None ''' try: response = boto3.client('ssm').get_parameters( Names=[parameter_name], WithDecryption=True ) return response.get('Parameters', None)[0].get('Value', '') except Exception: pass return ''
[ "def", "get_ssm_parameter", "(", "parameter_name", ")", ":", "try", ":", "response", "=", "boto3", ".", "client", "(", "'ssm'", ")", ".", "get_parameters", "(", "Names", "=", "[", "parameter_name", "]", ",", "WithDecryption", "=", "True", ")", "return", "response", ".", "get", "(", "'Parameters'", ",", "None", ")", "[", "0", "]", ".", "get", "(", "'Value'", ",", "''", ")", "except", "Exception", ":", "pass", "return", "''" ]
Get the decrypted value of an SSM parameter Args: parameter_name - the name of the stored parameter of interest Return: Value if allowed and present else None
[ "Get", "the", "decrypted", "value", "of", "an", "SSM", "parameter" ]
b1696f02661134d31b99b4dea7c0d21d09482d33
https://github.com/muckamuck/stackility/blob/b1696f02661134d31b99b4dea7c0d21d09482d33/stackility/utility/get_ssm_parameter.py#L6-L26
train
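A companion sketch using the plural form of the boto3 SSM call the helper above already relies on: get_parameters reports missing names in an InvalidParameters list, which avoids the silent '' fallback. Credentials and region are assumed to come from the usual boto3 environment:

import boto3

def get_ssm_parameters(parameter_names):
    # Up to ten names per call; values are decrypted as in the helper above.
    response = boto3.client('ssm').get_parameters(
        Names=list(parameter_names),
        WithDecryption=True,
    )
    values = {p['Name']: p['Value'] for p in response.get('Parameters', [])}
    missing = response.get('InvalidParameters', [])
    return values, missing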
theno/fabsetup
fabsetup/fabfile/setup/powerline.py
powerline
def powerline(): '''Install and set up powerline for vim, bash, tmux, and i3. It uses pip (python2) and the most up to date powerline version (trunk) from the github repository. More infos: https://github.com/powerline/powerline https://powerline.readthedocs.io/en/latest/installation.html https://github.com/powerline/fonts https://youtu.be/_D6RkmgShvU http://www.tecmint.com/powerline-adds-powerful-statuslines-and-prompts-to-vim-and-bash/ ''' bindings_dir, scripts_dir = install_upgrade_powerline() set_up_powerline_fonts() set_up_powerline_daemon(scripts_dir) powerline_for_vim(bindings_dir) powerline_for_bash_or_powerline_shell(bindings_dir) powerline_for_tmux(bindings_dir) powerline_for_i3(bindings_dir) print('\nYou may have to reboot for make changes take effect')
python
def powerline(): '''Install and set up powerline for vim, bash, tmux, and i3. It uses pip (python2) and the most up to date powerline version (trunk) from the github repository. More infos: https://github.com/powerline/powerline https://powerline.readthedocs.io/en/latest/installation.html https://github.com/powerline/fonts https://youtu.be/_D6RkmgShvU http://www.tecmint.com/powerline-adds-powerful-statuslines-and-prompts-to-vim-and-bash/ ''' bindings_dir, scripts_dir = install_upgrade_powerline() set_up_powerline_fonts() set_up_powerline_daemon(scripts_dir) powerline_for_vim(bindings_dir) powerline_for_bash_or_powerline_shell(bindings_dir) powerline_for_tmux(bindings_dir) powerline_for_i3(bindings_dir) print('\nYou may have to reboot for make changes take effect')
[ "def", "powerline", "(", ")", ":", "bindings_dir", ",", "scripts_dir", "=", "install_upgrade_powerline", "(", ")", "set_up_powerline_fonts", "(", ")", "set_up_powerline_daemon", "(", "scripts_dir", ")", "powerline_for_vim", "(", "bindings_dir", ")", "powerline_for_bash_or_powerline_shell", "(", "bindings_dir", ")", "powerline_for_tmux", "(", "bindings_dir", ")", "powerline_for_i3", "(", "bindings_dir", ")", "print", "(", "'\\nYou may have to reboot for make changes take effect'", ")" ]
Install and set up powerline for vim, bash, tmux, and i3. It uses pip (python2) and the most up to date powerline version (trunk) from the github repository. More infos: https://github.com/powerline/powerline https://powerline.readthedocs.io/en/latest/installation.html https://github.com/powerline/fonts https://youtu.be/_D6RkmgShvU http://www.tecmint.com/powerline-adds-powerful-statuslines-and-prompts-to-vim-and-bash/
[ "Install", "and", "set", "up", "powerline", "for", "vim", "bash", "tmux", "and", "i3", "." ]
ced728abff93551ba5677e63bc1bdc0ef5ca5777
https://github.com/theno/fabsetup/blob/ced728abff93551ba5677e63bc1bdc0ef5ca5777/fabsetup/fabfile/setup/powerline.py#L16-L36
train
marcuswhybrow/django-lineage
lineage/templatetags/lineage.py
ifancestor
def ifancestor(parser, token): """ Returns the contents of the tag if the provided path consitutes the base of the current pages path. There are two ways to provide arguments to this tag. Firstly one may provide a single argument that starts with a forward slash. e.g. {% ifancestor '/path/to/page' %}...{% endifancestor} {% ifancestor path_variable %}...{% endifancestor} In this case the provided path will be used directly. Alternatively any arguments accepted by the standard "url" tag may be provided. They will be passed to the url tag and the resultant path will be used. e.g. {% ifancestor 'core:model:detail' model.pk %}...{% endifancestor} Ultimately the provided path is matched against the path of the current page. If the provided path is found at the root of the current path it will be considered an anscestor, and the contents of this tag will be rendered. """ # Grab the contents between contents = parser.parse(('endifancestor',)) parser.delete_first_token() # If there is only one argument (2 including tag name) # parse it as a variable bits = token.split_contents() if len(bits) == 2: arg = parser.compile_filter(bits[1]) else: arg = None # Also pass all arguments to the original url tag url_node = url(parser, token) return AncestorNode(url_node, arg=arg, contents=contents)
python
def ifancestor(parser, token): """ Returns the contents of the tag if the provided path consitutes the base of the current pages path. There are two ways to provide arguments to this tag. Firstly one may provide a single argument that starts with a forward slash. e.g. {% ifancestor '/path/to/page' %}...{% endifancestor} {% ifancestor path_variable %}...{% endifancestor} In this case the provided path will be used directly. Alternatively any arguments accepted by the standard "url" tag may be provided. They will be passed to the url tag and the resultant path will be used. e.g. {% ifancestor 'core:model:detail' model.pk %}...{% endifancestor} Ultimately the provided path is matched against the path of the current page. If the provided path is found at the root of the current path it will be considered an anscestor, and the contents of this tag will be rendered. """ # Grab the contents between contents = parser.parse(('endifancestor',)) parser.delete_first_token() # If there is only one argument (2 including tag name) # parse it as a variable bits = token.split_contents() if len(bits) == 2: arg = parser.compile_filter(bits[1]) else: arg = None # Also pass all arguments to the original url tag url_node = url(parser, token) return AncestorNode(url_node, arg=arg, contents=contents)
[ "def", "ifancestor", "(", "parser", ",", "token", ")", ":", "contents", "=", "parser", ".", "parse", "(", "(", "'endifancestor'", ",", ")", ")", "parser", ".", "delete_first_token", "(", ")", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "len", "(", "bits", ")", "==", "2", ":", "arg", "=", "parser", ".", "compile_filter", "(", "bits", "[", "1", "]", ")", "else", ":", "arg", "=", "None", "url_node", "=", "url", "(", "parser", ",", "token", ")", "return", "AncestorNode", "(", "url_node", ",", "arg", "=", "arg", ",", "contents", "=", "contents", ")" ]
Returns the contents of the tag if the provided path consitutes the base of the current pages path. There are two ways to provide arguments to this tag. Firstly one may provide a single argument that starts with a forward slash. e.g. {% ifancestor '/path/to/page' %}...{% endifancestor} {% ifancestor path_variable %}...{% endifancestor} In this case the provided path will be used directly. Alternatively any arguments accepted by the standard "url" tag may be provided. They will be passed to the url tag and the resultant path will be used. e.g. {% ifancestor 'core:model:detail' model.pk %}...{% endifancestor} Ultimately the provided path is matched against the path of the current page. If the provided path is found at the root of the current path it will be considered an anscestor, and the contents of this tag will be rendered.
[ "Returns", "the", "contents", "of", "the", "tag", "if", "the", "provided", "path", "consitutes", "the", "base", "of", "the", "current", "pages", "path", "." ]
2bd18b54f721dd39bacf5fe5e7f07e7e99b75b5e
https://github.com/marcuswhybrow/django-lineage/blob/2bd18b54f721dd39bacf5fe5e7f07e7e99b75b5e/lineage/templatetags/lineage.py#L14-L54
train
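The tag itself lives in templates; the string below merely restates the template-side usage from the docstring, and is_ancestor is only a standalone sketch of the prefix test the docstring describes (comparing path segments so '/new' does not count as an ancestor of '/news'), not the library's own implementation:

# Template-side usage, as described above (requires the app to be installed):
IFANCESTOR_TEMPLATE = """
{% load lineage %}
{% ifancestor 'core:model:detail' model.pk %}class="active"{% endifancestor %}
"""

def is_ancestor(provided_path, current_path):
    # Split into segments and require the provided path to match from the root.
    provided = [s for s in provided_path.split('/') if s]
    current = [s for s in current_path.split('/') if s]
    return current[:len(provided)] == provided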
yahoo/serviceping
serviceping/cli.py
exit_statistics
def exit_statistics(hostname, start_time, count_sent, count_received, min_time, avg_time, max_time, deviation): """ Print ping exit statistics """ end_time = datetime.datetime.now() duration = end_time - start_time duration_sec = float(duration.seconds * 1000) duration_ms = float(duration.microseconds / 1000) duration = duration_sec + duration_ms package_loss = 100 - ((float(count_received) / float(count_sent)) * 100) print(f'\b\b--- {hostname} ping statistics ---') try: print(f'{count_sent} packages transmitted, {count_received} received, {package_loss}% package loss, time {duration}ms') except ZeroDivisionError: print(f'{count_sent} packets transmitted, {count_received} received, 100% packet loss, time {duration}ms') print( 'rtt min/avg/max/dev = %.2f/%.2f/%.2f/%.2f ms' % ( min_time.seconds*1000 + float(min_time.microseconds)/1000, float(avg_time) / 1000, max_time.seconds*1000 + float(max_time.microseconds)/1000, float(deviation) ) )
python
def exit_statistics(hostname, start_time, count_sent, count_received, min_time, avg_time, max_time, deviation): """ Print ping exit statistics """ end_time = datetime.datetime.now() duration = end_time - start_time duration_sec = float(duration.seconds * 1000) duration_ms = float(duration.microseconds / 1000) duration = duration_sec + duration_ms package_loss = 100 - ((float(count_received) / float(count_sent)) * 100) print(f'\b\b--- {hostname} ping statistics ---') try: print(f'{count_sent} packages transmitted, {count_received} received, {package_loss}% package loss, time {duration}ms') except ZeroDivisionError: print(f'{count_sent} packets transmitted, {count_received} received, 100% packet loss, time {duration}ms') print( 'rtt min/avg/max/dev = %.2f/%.2f/%.2f/%.2f ms' % ( min_time.seconds*1000 + float(min_time.microseconds)/1000, float(avg_time) / 1000, max_time.seconds*1000 + float(max_time.microseconds)/1000, float(deviation) ) )
[ "def", "exit_statistics", "(", "hostname", ",", "start_time", ",", "count_sent", ",", "count_received", ",", "min_time", ",", "avg_time", ",", "max_time", ",", "deviation", ")", ":", "end_time", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "duration", "=", "end_time", "-", "start_time", "duration_sec", "=", "float", "(", "duration", ".", "seconds", "*", "1000", ")", "duration_ms", "=", "float", "(", "duration", ".", "microseconds", "/", "1000", ")", "duration", "=", "duration_sec", "+", "duration_ms", "package_loss", "=", "100", "-", "(", "(", "float", "(", "count_received", ")", "/", "float", "(", "count_sent", ")", ")", "*", "100", ")", "print", "(", "f'\\b\\b--- {hostname} ping statistics ---'", ")", "try", ":", "print", "(", "f'{count_sent} packages transmitted, {count_received} received, {package_loss}% package loss, time {duration}ms'", ")", "except", "ZeroDivisionError", ":", "print", "(", "f'{count_sent} packets transmitted, {count_received} received, 100% packet loss, time {duration}ms'", ")", "print", "(", "'rtt min/avg/max/dev = %.2f/%.2f/%.2f/%.2f ms'", "%", "(", "min_time", ".", "seconds", "*", "1000", "+", "float", "(", "min_time", ".", "microseconds", ")", "/", "1000", ",", "float", "(", "avg_time", ")", "/", "1000", ",", "max_time", ".", "seconds", "*", "1000", "+", "float", "(", "max_time", ".", "microseconds", ")", "/", "1000", ",", "float", "(", "deviation", ")", ")", ")" ]
Print ping exit statistics
[ "Print", "ping", "exit", "statistics" ]
1f9df5ee5b3cba466426b1164262278472ba4977
https://github.com/yahoo/serviceping/blob/1f9df5ee5b3cba466426b1164262278472ba4977/serviceping/cli.py#L23-L45
train
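A standalone sketch of the two headline numbers exit_statistics prints, using timedelta.total_seconds() (which also covers runs longer than a day) and an explicit guard for count_sent == 0; in the function above the division happens before the try block, so the ZeroDivisionError branch there is worth double-checking:

import datetime

def summarize(start_time, count_sent, count_received):
    # Whole-run duration in milliseconds.
    elapsed = datetime.datetime.now() - start_time
    duration_ms = elapsed.total_seconds() * 1000.0
    # Packet loss in percent, without dividing by zero when nothing was sent.
    if count_sent:
        loss_pct = 100.0 - (count_received / count_sent) * 100.0
    else:
        loss_pct = 100.0
    return duration_ms, loss_pct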
jaraco/jaraco.windows
jaraco/windows/filesystem/change.py
Notifier._filtered_walk
def _filtered_walk(path, file_filter): """ static method that calls os.walk, but filters out anything that doesn't match the filter """ for root, dirs, files in os.walk(path): log.debug('looking in %s', root) log.debug('files is %s', files) file_filter.set_root(root) files = filter(file_filter, files) log.debug('filtered files is %s', files) yield (root, dirs, files)
python
def _filtered_walk(path, file_filter): """ static method that calls os.walk, but filters out anything that doesn't match the filter """ for root, dirs, files in os.walk(path): log.debug('looking in %s', root) log.debug('files is %s', files) file_filter.set_root(root) files = filter(file_filter, files) log.debug('filtered files is %s', files) yield (root, dirs, files)
[ "def", "_filtered_walk", "(", "path", ",", "file_filter", ")", ":", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "path", ")", ":", "log", ".", "debug", "(", "'looking in %s'", ",", "root", ")", "log", ".", "debug", "(", "'files is %s'", ",", "files", ")", "file_filter", ".", "set_root", "(", "root", ")", "files", "=", "filter", "(", "file_filter", ",", "files", ")", "log", ".", "debug", "(", "'filtered files is %s'", ",", "files", ")", "yield", "(", "root", ",", "dirs", ",", "files", ")" ]
static method that calls os.walk, but filters out anything that doesn't match the filter
[ "static", "method", "that", "calls", "os", ".", "walk", "but", "filters", "out", "anything", "that", "doesn", "t", "match", "the", "filter" ]
51811efed50b46ad08daa25408a1cc806bc8d519
https://github.com/jaraco/jaraco.windows/blob/51811efed50b46ad08daa25408a1cc806bc8d519/jaraco/windows/filesystem/change.py#L171-L182
train
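A self-contained sketch of the same walk-and-filter shape; SuffixFilter is a hypothetical stand-in for the file_filter objects used above (callable per filename, with a set_root hook). The list comprehension also sidesteps the Python 3 detail that filter() returns a lazy object, which is what the debug log above would print:

import os

class SuffixFilter:
    # Hypothetical filter: keeps files with a given suffix, remembers the root.
    def __init__(self, suffix):
        self.suffix = suffix
        self.root = None

    def set_root(self, root):
        self.root = root

    def __call__(self, filename):
        return filename.endswith(self.suffix)

def filtered_walk(path, file_filter):
    # Same (root, dirs, files) shape as os.walk, with files already filtered.
    for root, dirs, files in os.walk(path):
        file_filter.set_root(root)
        yield root, dirs, [f for f in files if file_filter(f)]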
pmac/django-redirect-urls
redirect_urls/decorators.py
cache_control_expires
def cache_control_expires(num_hours): """ Set the appropriate Cache-Control and Expires headers for the given number of hours. """ num_seconds = int(num_hours * 60 * 60) def decorator(func): @wraps(func) def inner(request, *args, **kwargs): response = func(request, *args, **kwargs) patch_response_headers(response, num_seconds) return response return inner return decorator
python
def cache_control_expires(num_hours): """ Set the appropriate Cache-Control and Expires headers for the given number of hours. """ num_seconds = int(num_hours * 60 * 60) def decorator(func): @wraps(func) def inner(request, *args, **kwargs): response = func(request, *args, **kwargs) patch_response_headers(response, num_seconds) return response return inner return decorator
[ "def", "cache_control_expires", "(", "num_hours", ")", ":", "num_seconds", "=", "int", "(", "num_hours", "*", "60", "*", "60", ")", "def", "decorator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "inner", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "response", "=", "func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "patch_response_headers", "(", "response", ",", "num_seconds", ")", "return", "response", "return", "inner", "return", "decorator" ]
Set the appropriate Cache-Control and Expires headers for the given number of hours.
[ "Set", "the", "appropriate", "Cache", "-", "Control", "and", "Expires", "headers", "for", "the", "given", "number", "of", "hours", "." ]
21495194b0b2a2bdd1013e13ec0d54d34dd7f750
https://github.com/pmac/django-redirect-urls/blob/21495194b0b2a2bdd1013e13ec0d54d34dd7f750/redirect_urls/decorators.py#L10-L26
train
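A minimal view sketch, assuming a configured Django project; the decorator is imported from the module shown in the path above, and patch_response_headers then sets both Cache-Control: max-age and Expires on the response:

from django.http import HttpResponse
from redirect_urls.decorators import cache_control_expires

@cache_control_expires(12)  # cache for 12 hours
def robots_txt(request):
    return HttpResponse("User-agent: *\nDisallow:\n", content_type="text/plain")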
muckamuck/stackility
stackility/CloudStackUtility.py
CloudStackUtility.upsert
def upsert(self): """ The main event of the utility. Create or update a Cloud Formation stack. Injecting properties where needed Args: None Returns: True if the stack create/update is started successfully else False if the start goes off in the weeds. Exits: If the user asked for a dryrun exit(with a code 0) the thing here. There is no point continuing after that point. """ required_parameters = [] self._stackParameters = [] try: self._initialize_upsert() except Exception: return False try: available_parameters = self._parameters.keys() for parameter_name in self._template.get('Parameters', {}): required_parameters.append(str(parameter_name)) logging.info(' required parameters: ' + str(required_parameters)) logging.info('available parameters: ' + str(available_parameters)) parameters = [] for required_parameter in required_parameters: parameter = {} parameter['ParameterKey'] = str(required_parameter) required_parameter = str(required_parameter) if required_parameter in self._parameters: parameter['ParameterValue'] = self._parameters[required_parameter] else: parameter['ParameterValue'] = self._parameters[required_parameter.lower()] parameters.append(parameter) if not self._analyze_stuff(): sys.exit(1) if self._config.get('dryrun', False): logging.info('Generating change set') set_id = self._generate_change_set(parameters) if set_id: self._describe_change_set(set_id) logging.info('This was a dryrun') sys.exit(0) self._tags.append({"Key": "CODE_VERSION_SD", "Value": self._config.get('codeVersion')}) self._tags.append({"Key": "ANSWER", "Value": str(42)}) if self._updateStack: stack = self._cloudFormation.update_stack( StackName=self._config.get('environment', {}).get('stack_name', None), TemplateURL=self._templateUrl, Parameters=parameters, Capabilities=['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'], Tags=self._tags, ClientRequestToken=str(uuid.uuid4()) ) logging.info('existing stack ID: {}'.format(stack.get('StackId', 'unknown'))) else: stack = self._cloudFormation.create_stack( StackName=self._config.get('environment', {}).get('stack_name', None), TemplateURL=self._templateUrl, Parameters=parameters, Capabilities=['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'], Tags=self._tags, ClientRequestToken=str(uuid.uuid4()) ) logging.info('new stack ID: {}'.format(stack.get('StackId', 'unknown'))) except Exception as x: if self._verbose: logging.error(x, exc_info=True) else: logging.error(x, exc_info=False) return False return True
python
def upsert(self): """ The main event of the utility. Create or update a Cloud Formation stack. Injecting properties where needed Args: None Returns: True if the stack create/update is started successfully else False if the start goes off in the weeds. Exits: If the user asked for a dryrun exit(with a code 0) the thing here. There is no point continuing after that point. """ required_parameters = [] self._stackParameters = [] try: self._initialize_upsert() except Exception: return False try: available_parameters = self._parameters.keys() for parameter_name in self._template.get('Parameters', {}): required_parameters.append(str(parameter_name)) logging.info(' required parameters: ' + str(required_parameters)) logging.info('available parameters: ' + str(available_parameters)) parameters = [] for required_parameter in required_parameters: parameter = {} parameter['ParameterKey'] = str(required_parameter) required_parameter = str(required_parameter) if required_parameter in self._parameters: parameter['ParameterValue'] = self._parameters[required_parameter] else: parameter['ParameterValue'] = self._parameters[required_parameter.lower()] parameters.append(parameter) if not self._analyze_stuff(): sys.exit(1) if self._config.get('dryrun', False): logging.info('Generating change set') set_id = self._generate_change_set(parameters) if set_id: self._describe_change_set(set_id) logging.info('This was a dryrun') sys.exit(0) self._tags.append({"Key": "CODE_VERSION_SD", "Value": self._config.get('codeVersion')}) self._tags.append({"Key": "ANSWER", "Value": str(42)}) if self._updateStack: stack = self._cloudFormation.update_stack( StackName=self._config.get('environment', {}).get('stack_name', None), TemplateURL=self._templateUrl, Parameters=parameters, Capabilities=['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'], Tags=self._tags, ClientRequestToken=str(uuid.uuid4()) ) logging.info('existing stack ID: {}'.format(stack.get('StackId', 'unknown'))) else: stack = self._cloudFormation.create_stack( StackName=self._config.get('environment', {}).get('stack_name', None), TemplateURL=self._templateUrl, Parameters=parameters, Capabilities=['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'], Tags=self._tags, ClientRequestToken=str(uuid.uuid4()) ) logging.info('new stack ID: {}'.format(stack.get('StackId', 'unknown'))) except Exception as x: if self._verbose: logging.error(x, exc_info=True) else: logging.error(x, exc_info=False) return False return True
[ "def", "upsert", "(", "self", ")", ":", "required_parameters", "=", "[", "]", "self", ".", "_stackParameters", "=", "[", "]", "try", ":", "self", ".", "_initialize_upsert", "(", ")", "except", "Exception", ":", "return", "False", "try", ":", "available_parameters", "=", "self", ".", "_parameters", ".", "keys", "(", ")", "for", "parameter_name", "in", "self", ".", "_template", ".", "get", "(", "'Parameters'", ",", "{", "}", ")", ":", "required_parameters", ".", "append", "(", "str", "(", "parameter_name", ")", ")", "logging", ".", "info", "(", "' required parameters: '", "+", "str", "(", "required_parameters", ")", ")", "logging", ".", "info", "(", "'available parameters: '", "+", "str", "(", "available_parameters", ")", ")", "parameters", "=", "[", "]", "for", "required_parameter", "in", "required_parameters", ":", "parameter", "=", "{", "}", "parameter", "[", "'ParameterKey'", "]", "=", "str", "(", "required_parameter", ")", "required_parameter", "=", "str", "(", "required_parameter", ")", "if", "required_parameter", "in", "self", ".", "_parameters", ":", "parameter", "[", "'ParameterValue'", "]", "=", "self", ".", "_parameters", "[", "required_parameter", "]", "else", ":", "parameter", "[", "'ParameterValue'", "]", "=", "self", ".", "_parameters", "[", "required_parameter", ".", "lower", "(", ")", "]", "parameters", ".", "append", "(", "parameter", ")", "if", "not", "self", ".", "_analyze_stuff", "(", ")", ":", "sys", ".", "exit", "(", "1", ")", "if", "self", ".", "_config", ".", "get", "(", "'dryrun'", ",", "False", ")", ":", "logging", ".", "info", "(", "'Generating change set'", ")", "set_id", "=", "self", ".", "_generate_change_set", "(", "parameters", ")", "if", "set_id", ":", "self", ".", "_describe_change_set", "(", "set_id", ")", "logging", ".", "info", "(", "'This was a dryrun'", ")", "sys", ".", "exit", "(", "0", ")", "self", ".", "_tags", ".", "append", "(", "{", "\"Key\"", ":", "\"CODE_VERSION_SD\"", ",", "\"Value\"", ":", "self", ".", "_config", ".", "get", "(", "'codeVersion'", ")", "}", ")", "self", ".", "_tags", ".", "append", "(", "{", "\"Key\"", ":", "\"ANSWER\"", ",", "\"Value\"", ":", "str", "(", "42", ")", "}", ")", "if", "self", ".", "_updateStack", ":", "stack", "=", "self", ".", "_cloudFormation", ".", "update_stack", "(", "StackName", "=", "self", ".", "_config", ".", "get", "(", "'environment'", ",", "{", "}", ")", ".", "get", "(", "'stack_name'", ",", "None", ")", ",", "TemplateURL", "=", "self", ".", "_templateUrl", ",", "Parameters", "=", "parameters", ",", "Capabilities", "=", "[", "'CAPABILITY_IAM'", ",", "'CAPABILITY_NAMED_IAM'", "]", ",", "Tags", "=", "self", ".", "_tags", ",", "ClientRequestToken", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", ")", "logging", ".", "info", "(", "'existing stack ID: {}'", ".", "format", "(", "stack", ".", "get", "(", "'StackId'", ",", "'unknown'", ")", ")", ")", "else", ":", "stack", "=", "self", ".", "_cloudFormation", ".", "create_stack", "(", "StackName", "=", "self", ".", "_config", ".", "get", "(", "'environment'", ",", "{", "}", ")", ".", "get", "(", "'stack_name'", ",", "None", ")", ",", "TemplateURL", "=", "self", ".", "_templateUrl", ",", "Parameters", "=", "parameters", ",", "Capabilities", "=", "[", "'CAPABILITY_IAM'", ",", "'CAPABILITY_NAMED_IAM'", "]", ",", "Tags", "=", "self", ".", "_tags", ",", "ClientRequestToken", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", ")", "logging", ".", "info", "(", "'new stack ID: {}'", ".", "format", "(", "stack", ".", "get", "(", "'StackId'", ",", 
"'unknown'", ")", ")", ")", "except", "Exception", "as", "x", ":", "if", "self", ".", "_verbose", ":", "logging", ".", "error", "(", "x", ",", "exc_info", "=", "True", ")", "else", ":", "logging", ".", "error", "(", "x", ",", "exc_info", "=", "False", ")", "return", "False", "return", "True" ]
The main event of the utility. Create or update a Cloud Formation stack. Injecting properties where needed Args: None Returns: True if the stack create/update is started successfully else False if the start goes off in the weeds. Exits: If the user asked for a dryrun exit(with a code 0) the thing here. There is no point continuing after that point.
[ "The", "main", "event", "of", "the", "utility", ".", "Create", "or", "update", "a", "Cloud", "Formation", "stack", ".", "Injecting", "properties", "where", "needed" ]
b1696f02661134d31b99b4dea7c0d21d09482d33
https://github.com/muckamuck/stackility/blob/b1696f02661134d31b99b4dea7c0d21d09482d33/stackility/CloudStackUtility.py#L89-L179
train
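The densest part of upsert() is the mapping from template parameters to the boto3 Parameters shape; the sketch below isolates just that step, with one deliberate difference flagged in the comment (a missing value raises explicitly instead of surfacing from the lowercase fallback lookup):

def build_cfn_parameters(template, available):
    # `template` is the parsed CloudFormation template, `available` the
    # name -> value mapping gathered from the properties file.
    parameters = []
    for name in template.get('Parameters', {}):
        key = str(name)
        value = available.get(key, available.get(key.lower()))
        if value is None:
            # upsert() lets the second dict lookup raise; raising here
            # makes the failure mode easier to read.
            raise KeyError('no value supplied for template parameter ' + key)
        parameters.append({'ParameterKey': key, 'ParameterValue': value})
    return parameters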
muckamuck/stackility
stackility/CloudStackUtility.py
CloudStackUtility.list
def list(self): """ List the existing stacks in the indicated region Args: None Returns: True if True Todo: Figure out what could go wrong and take steps to hanlde problems. """ self._initialize_list() interested = True response = self._cloudFormation.list_stacks() print('Stack(s):') while interested: if 'StackSummaries' in response: for stack in response['StackSummaries']: stack_status = stack['StackStatus'] if stack_status != 'DELETE_COMPLETE': print(' [{}] - {}'.format(stack['StackStatus'], stack['StackName'])) next_token = response.get('NextToken', None) if next_token: response = self._cloudFormation.list_stacks(NextToken=next_token) else: interested = False return True
python
def list(self): """ List the existing stacks in the indicated region Args: None Returns: True if True Todo: Figure out what could go wrong and take steps to hanlde problems. """ self._initialize_list() interested = True response = self._cloudFormation.list_stacks() print('Stack(s):') while interested: if 'StackSummaries' in response: for stack in response['StackSummaries']: stack_status = stack['StackStatus'] if stack_status != 'DELETE_COMPLETE': print(' [{}] - {}'.format(stack['StackStatus'], stack['StackName'])) next_token = response.get('NextToken', None) if next_token: response = self._cloudFormation.list_stacks(NextToken=next_token) else: interested = False return True
[ "def", "list", "(", "self", ")", ":", "self", ".", "_initialize_list", "(", ")", "interested", "=", "True", "response", "=", "self", ".", "_cloudFormation", ".", "list_stacks", "(", ")", "print", "(", "'Stack(s):'", ")", "while", "interested", ":", "if", "'StackSummaries'", "in", "response", ":", "for", "stack", "in", "response", "[", "'StackSummaries'", "]", ":", "stack_status", "=", "stack", "[", "'StackStatus'", "]", "if", "stack_status", "!=", "'DELETE_COMPLETE'", ":", "print", "(", "' [{}] - {}'", ".", "format", "(", "stack", "[", "'StackStatus'", "]", ",", "stack", "[", "'StackName'", "]", ")", ")", "next_token", "=", "response", ".", "get", "(", "'NextToken'", ",", "None", ")", "if", "next_token", ":", "response", "=", "self", ".", "_cloudFormation", ".", "list_stacks", "(", "NextToken", "=", "next_token", ")", "else", ":", "interested", "=", "False", "return", "True" ]
List the existing stacks in the indicated region Args: None Returns: True if True Todo: Figure out what could go wrong and take steps to hanlde problems.
[ "List", "the", "existing", "stacks", "in", "the", "indicated", "region" ]
b1696f02661134d31b99b4dea7c0d21d09482d33
https://github.com/muckamuck/stackility/blob/b1696f02661134d31b99b4dea7c0d21d09482d33/stackility/CloudStackUtility.py#L321-L353
train
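A generator form of the NextToken loop in list(), yielding the same summaries the method prints; `cloudformation` is a boto3 CloudFormation client. boto3's built-in paginator (get_paginator('list_stacks')) would be an equivalent alternative:

def iter_stack_summaries(cloudformation):
    kwargs = {}
    while True:
        response = cloudformation.list_stacks(**kwargs)
        for stack in response.get('StackSummaries', []):
            # Skip stacks that only exist as deletion records.
            if stack['StackStatus'] != 'DELETE_COMPLETE':
                yield stack
        token = response.get('NextToken')
        if not token:
            return
        kwargs = {'NextToken': token}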
muckamuck/stackility
stackility/CloudStackUtility.py
CloudStackUtility.smash
def smash(self): """ Smash the given stack Args: None Returns: True if True Todo: Figure out what could go wrong and take steps to hanlde problems. """ self._initialize_smash() try: stack_name = self._config.get('environment', {}).get('stack_name', None) response = self._cloudFormation.describe_stacks(StackName=stack_name) logging.debug('smash pre-flight returned: {}'.format( json.dumps(response, indent=4, default=json_util.default ))) except ClientError as wtf: logging.warning('your stack is in another castle [0].') return False except Exception as wtf: logging.error('failed to find intial status of smash candidate: {}'.format(wtf)) return False response = self._cloudFormation.delete_stack(StackName=stack_name) logging.info('delete started for stack: {}'.format(stack_name)) logging.debug('delete_stack returned: {}'.format(json.dumps(response, indent=4))) return self.poll_stack()
python
def smash(self): """ Smash the given stack Args: None Returns: True if True Todo: Figure out what could go wrong and take steps to hanlde problems. """ self._initialize_smash() try: stack_name = self._config.get('environment', {}).get('stack_name', None) response = self._cloudFormation.describe_stacks(StackName=stack_name) logging.debug('smash pre-flight returned: {}'.format( json.dumps(response, indent=4, default=json_util.default ))) except ClientError as wtf: logging.warning('your stack is in another castle [0].') return False except Exception as wtf: logging.error('failed to find intial status of smash candidate: {}'.format(wtf)) return False response = self._cloudFormation.delete_stack(StackName=stack_name) logging.info('delete started for stack: {}'.format(stack_name)) logging.debug('delete_stack returned: {}'.format(json.dumps(response, indent=4))) return self.poll_stack()
[ "def", "smash", "(", "self", ")", ":", "self", ".", "_initialize_smash", "(", ")", "try", ":", "stack_name", "=", "self", ".", "_config", ".", "get", "(", "'environment'", ",", "{", "}", ")", ".", "get", "(", "'stack_name'", ",", "None", ")", "response", "=", "self", ".", "_cloudFormation", ".", "describe_stacks", "(", "StackName", "=", "stack_name", ")", "logging", ".", "debug", "(", "'smash pre-flight returned: {}'", ".", "format", "(", "json", ".", "dumps", "(", "response", ",", "indent", "=", "4", ",", "default", "=", "json_util", ".", "default", ")", ")", ")", "except", "ClientError", "as", "wtf", ":", "logging", ".", "warning", "(", "'your stack is in another castle [0].'", ")", "return", "False", "except", "Exception", "as", "wtf", ":", "logging", ".", "error", "(", "'failed to find intial status of smash candidate: {}'", ".", "format", "(", "wtf", ")", ")", "return", "False", "response", "=", "self", ".", "_cloudFormation", ".", "delete_stack", "(", "StackName", "=", "stack_name", ")", "logging", ".", "info", "(", "'delete started for stack: {}'", ".", "format", "(", "stack_name", ")", ")", "logging", ".", "debug", "(", "'delete_stack returned: {}'", ".", "format", "(", "json", ".", "dumps", "(", "response", ",", "indent", "=", "4", ")", ")", ")", "return", "self", ".", "poll_stack", "(", ")" ]
Smash the given stack Args: None Returns: True if True Todo: Figure out what could go wrong and take steps to hanlde problems.
[ "Smash", "the", "given", "stack" ]
b1696f02661134d31b99b4dea7c0d21d09482d33
https://github.com/muckamuck/stackility/blob/b1696f02661134d31b99b4dea7c0d21d09482d33/stackility/CloudStackUtility.py#L355-L388
train
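A sketch of the same delete-and-wait flow using the built-in boto3 waiter instead of the utility's own poll_stack() loop; profile and region handling mirrors what the class does elsewhere, and the stack name is a placeholder:

import boto3

def delete_stack_and_wait(stack_name, region=None, profile=None):
    session = boto3.session.Session(profile_name=profile) if profile else boto3.session.Session()
    cloudformation = session.client('cloudformation', region_name=region)
    cloudformation.delete_stack(StackName=stack_name)
    # The waiter polls describe_stacks and raises WaiterError on failure or timeout.
    waiter = cloudformation.get_waiter('stack_delete_complete')
    waiter.wait(StackName=stack_name)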
muckamuck/stackility
stackility/CloudStackUtility.py
CloudStackUtility._init_boto3_clients
def _init_boto3_clients(self): """ The utililty requires boto3 clients to Cloud Formation and S3. Here is where we make them. Args: None Returns: Good or Bad; True or False """ try: profile = self._config.get('environment', {}).get('profile') region = self._config.get('environment', {}).get('region') if profile: self._b3Sess = boto3.session.Session(profile_name=profile) else: self._b3Sess = boto3.session.Session() self._s3 = self._b3Sess.client('s3') self._cloudFormation = self._b3Sess.client('cloudformation', region_name=region) self._ssm = self._b3Sess.client('ssm', region_name=region) return True except Exception as wtf: logging.error('Exception caught in intialize_session(): {}'.format(wtf)) traceback.print_exc(file=sys.stdout) return False
python
def _init_boto3_clients(self): """ The utililty requires boto3 clients to Cloud Formation and S3. Here is where we make them. Args: None Returns: Good or Bad; True or False """ try: profile = self._config.get('environment', {}).get('profile') region = self._config.get('environment', {}).get('region') if profile: self._b3Sess = boto3.session.Session(profile_name=profile) else: self._b3Sess = boto3.session.Session() self._s3 = self._b3Sess.client('s3') self._cloudFormation = self._b3Sess.client('cloudformation', region_name=region) self._ssm = self._b3Sess.client('ssm', region_name=region) return True except Exception as wtf: logging.error('Exception caught in intialize_session(): {}'.format(wtf)) traceback.print_exc(file=sys.stdout) return False
[ "def", "_init_boto3_clients", "(", "self", ")", ":", "try", ":", "profile", "=", "self", ".", "_config", ".", "get", "(", "'environment'", ",", "{", "}", ")", ".", "get", "(", "'profile'", ")", "region", "=", "self", ".", "_config", ".", "get", "(", "'environment'", ",", "{", "}", ")", ".", "get", "(", "'region'", ")", "if", "profile", ":", "self", ".", "_b3Sess", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "profile", ")", "else", ":", "self", ".", "_b3Sess", "=", "boto3", ".", "session", ".", "Session", "(", ")", "self", ".", "_s3", "=", "self", ".", "_b3Sess", ".", "client", "(", "'s3'", ")", "self", ".", "_cloudFormation", "=", "self", ".", "_b3Sess", ".", "client", "(", "'cloudformation'", ",", "region_name", "=", "region", ")", "self", ".", "_ssm", "=", "self", ".", "_b3Sess", ".", "client", "(", "'ssm'", ",", "region_name", "=", "region", ")", "return", "True", "except", "Exception", "as", "wtf", ":", "logging", ".", "error", "(", "'Exception caught in intialize_session(): {}'", ".", "format", "(", "wtf", ")", ")", "traceback", ".", "print_exc", "(", "file", "=", "sys", ".", "stdout", ")", "return", "False" ]
The utililty requires boto3 clients to Cloud Formation and S3. Here is where we make them. Args: None Returns: Good or Bad; True or False
[ "The", "utililty", "requires", "boto3", "clients", "to", "Cloud", "Formation", "and", "S3", ".", "Here", "is", "where", "we", "make", "them", "." ]
b1696f02661134d31b99b4dea7c0d21d09482d33
https://github.com/muckamuck/stackility/blob/b1696f02661134d31b99b4dea7c0d21d09482d33/stackility/CloudStackUtility.py#L390-L417
train
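The same wiring as _init_boto3_clients(), reduced to a factory that returns the three clients instead of storing them on self; note that, as in the method above, the S3 client is created without an explicit region:

import boto3

def make_clients(profile=None, region=None):
    session = boto3.session.Session(profile_name=profile) if profile else boto3.session.Session()
    return {
        's3': session.client('s3'),
        'cloudformation': session.client('cloudformation', region_name=region),
        'ssm': session.client('ssm', region_name=region),
    }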
muckamuck/stackility
stackility/CloudStackUtility.py
CloudStackUtility._get_ssm_parameter
def _get_ssm_parameter(self, p): """ Get parameters from Simple Systems Manager Args: p - a parameter name Returns: a value, decrypted if needed, if successful or None if things go sideways. """ try: response = self._ssm.get_parameter(Name=p, WithDecryption=True) return response.get('Parameter', {}).get('Value', None) except Exception as ruh_roh: logging.error(ruh_roh, exc_info=False) return None
python
def _get_ssm_parameter(self, p): """ Get parameters from Simple Systems Manager Args: p - a parameter name Returns: a value, decrypted if needed, if successful or None if things go sideways. """ try: response = self._ssm.get_parameter(Name=p, WithDecryption=True) return response.get('Parameter', {}).get('Value', None) except Exception as ruh_roh: logging.error(ruh_roh, exc_info=False) return None
[ "def", "_get_ssm_parameter", "(", "self", ",", "p", ")", ":", "try", ":", "response", "=", "self", ".", "_ssm", ".", "get_parameter", "(", "Name", "=", "p", ",", "WithDecryption", "=", "True", ")", "return", "response", ".", "get", "(", "'Parameter'", ",", "{", "}", ")", ".", "get", "(", "'Value'", ",", "None", ")", "except", "Exception", "as", "ruh_roh", ":", "logging", ".", "error", "(", "ruh_roh", ",", "exc_info", "=", "False", ")", "return", "None" ]
Get parameters from Simple Systems Manager Args: p - a parameter name Returns: a value, decrypted if needed, if successful or None if things go sideways.
[ "Get", "parameters", "from", "Simple", "Systems", "Manager" ]
b1696f02661134d31b99b4dea7c0d21d09482d33
https://github.com/muckamuck/stackility/blob/b1696f02661134d31b99b4dea7c0d21d09482d33/stackility/CloudStackUtility.py#L434-L451
train
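Because the same SSM name may be referenced more than once while parameters are being filled in, a memoized variant is one possible refinement; this sketch raises on a missing parameter instead of returning None, so callers must opt in to that behaviour:

import functools

import boto3

@functools.lru_cache(maxsize=None)
def cached_ssm_value(name):
    # One API call per distinct name for the lifetime of the process;
    # a missing name raises a client error rather than returning None.
    response = boto3.client('ssm').get_parameter(Name=name, WithDecryption=True)
    return response['Parameter']['Value']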
muckamuck/stackility
stackility/CloudStackUtility.py
CloudStackUtility._fill_parameters
def _fill_parameters(self): """ Fill in the _parameters dict from the properties file. Args: None Returns: True Todo: Figure out what could go wrong and at least acknowledge the the fact that Murphy was an optimist. """ self._parameters = self._config.get('parameters', {}) self._fill_defaults() for k in self._parameters.keys(): try: if self._parameters[k].startswith(self.SSM) and self._parameters[k].endswith(']'): parts = self._parameters[k].split(':') tmp = parts[1].replace(']', '') val = self._get_ssm_parameter(tmp) if val: self._parameters[k] = val else: logging.error('SSM parameter {} not found'.format(tmp)) return False elif self._parameters[k] == self.ASK: val = None a1 = '__x___' a2 = '__y___' prompt1 = "Enter value for '{}': ".format(k) prompt2 = "Confirm value for '{}': ".format(k) while a1 != a2: a1 = getpass.getpass(prompt=prompt1) a2 = getpass.getpass(prompt=prompt2) if a1 == a2: val = a1 else: print('values do not match, try again') self._parameters[k] = val except: pass return True
python
def _fill_parameters(self): """ Fill in the _parameters dict from the properties file. Args: None Returns: True Todo: Figure out what could go wrong and at least acknowledge the the fact that Murphy was an optimist. """ self._parameters = self._config.get('parameters', {}) self._fill_defaults() for k in self._parameters.keys(): try: if self._parameters[k].startswith(self.SSM) and self._parameters[k].endswith(']'): parts = self._parameters[k].split(':') tmp = parts[1].replace(']', '') val = self._get_ssm_parameter(tmp) if val: self._parameters[k] = val else: logging.error('SSM parameter {} not found'.format(tmp)) return False elif self._parameters[k] == self.ASK: val = None a1 = '__x___' a2 = '__y___' prompt1 = "Enter value for '{}': ".format(k) prompt2 = "Confirm value for '{}': ".format(k) while a1 != a2: a1 = getpass.getpass(prompt=prompt1) a2 = getpass.getpass(prompt=prompt2) if a1 == a2: val = a1 else: print('values do not match, try again') self._parameters[k] = val except: pass return True
[ "def", "_fill_parameters", "(", "self", ")", ":", "self", ".", "_parameters", "=", "self", ".", "_config", ".", "get", "(", "'parameters'", ",", "{", "}", ")", "self", ".", "_fill_defaults", "(", ")", "for", "k", "in", "self", ".", "_parameters", ".", "keys", "(", ")", ":", "try", ":", "if", "self", ".", "_parameters", "[", "k", "]", ".", "startswith", "(", "self", ".", "SSM", ")", "and", "self", ".", "_parameters", "[", "k", "]", ".", "endswith", "(", "']'", ")", ":", "parts", "=", "self", ".", "_parameters", "[", "k", "]", ".", "split", "(", "':'", ")", "tmp", "=", "parts", "[", "1", "]", ".", "replace", "(", "']'", ",", "''", ")", "val", "=", "self", ".", "_get_ssm_parameter", "(", "tmp", ")", "if", "val", ":", "self", ".", "_parameters", "[", "k", "]", "=", "val", "else", ":", "logging", ".", "error", "(", "'SSM parameter {} not found'", ".", "format", "(", "tmp", ")", ")", "return", "False", "elif", "self", ".", "_parameters", "[", "k", "]", "==", "self", ".", "ASK", ":", "val", "=", "None", "a1", "=", "'__x", "'", "a2", "=", "'__y", "'", "prompt1", "=", "\"Enter value for '{}': \"", ".", "format", "(", "k", ")", "prompt2", "=", "\"Confirm value for '{}': \"", ".", "format", "(", "k", ")", "while", "a1", "!=", "a2", ":", "a1", "=", "getpass", ".", "getpass", "(", "prompt", "=", "prompt1", ")", "a2", "=", "getpass", ".", "getpass", "(", "prompt", "=", "prompt2", ")", "if", "a1", "==", "a2", ":", "val", "=", "a1", "else", ":", "print", "(", "'values do not match, try again'", ")", "self", ".", "_parameters", "[", "k", "]", "=", "val", "except", ":", "pass", "return", "True" ]
Fill in the _parameters dict from the properties file. Args: None Returns: True Todo: Figure out what could go wrong and at least acknowledge the the fact that Murphy was an optimist.
[ "Fill", "in", "the", "_parameters", "dict", "from", "the", "properties", "file", "." ]
b1696f02661134d31b99b4dea7c0d21d09482d33
https://github.com/muckamuck/stackility/blob/b1696f02661134d31b99b4dea7c0d21d09482d33/stackility/CloudStackUtility.py#L453-L498
train
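A standalone sketch of the substitution loop in _fill_parameters(). The exact placeholder markers (self.SSM and self.ASK) are not shown in this section, so the '[ssm:name]' and '[ask]' spellings below are assumptions for illustration only; the prompt is injected as a callable so the sketch stays testable without a terminal:

def resolve_placeholders(parameters, ssm_lookup, ask):
    # `ssm_lookup(name)` and `ask(key)` are supplied by the caller.
    resolved = {}
    for key, value in parameters.items():
        if isinstance(value, str) and value.startswith('[ssm:') and value.endswith(']'):
            resolved[key] = ssm_lookup(value[len('[ssm:'):-1])
        elif value == '[ask]':
            resolved[key] = ask(key)
        else:
            resolved[key] = value
    return resolved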
muckamuck/stackility
stackility/CloudStackUtility.py
CloudStackUtility._read_tags
def _read_tags(self): """ Fill in the _tags dict from the tags file. Args: None Returns: True Todo: Figure what could go wrong and at least acknowledge the the fact that Murphy was an optimist. """ tags = self._config.get('tags', {}) logging.info('Tags:') for tag_name in tags.keys(): tag = {} tag['Key'] = tag_name tag['Value'] = tags[tag_name] self._tags.append(tag) logging.info('{} = {}'.format(tag_name, tags[tag_name])) logging.debug(json.dumps( self._tags, indent=2, sort_keys=True )) return True
python
def _read_tags(self): """ Fill in the _tags dict from the tags file. Args: None Returns: True Todo: Figure what could go wrong and at least acknowledge the the fact that Murphy was an optimist. """ tags = self._config.get('tags', {}) logging.info('Tags:') for tag_name in tags.keys(): tag = {} tag['Key'] = tag_name tag['Value'] = tags[tag_name] self._tags.append(tag) logging.info('{} = {}'.format(tag_name, tags[tag_name])) logging.debug(json.dumps( self._tags, indent=2, sort_keys=True )) return True
[ "def", "_read_tags", "(", "self", ")", ":", "tags", "=", "self", ".", "_config", ".", "get", "(", "'tags'", ",", "{", "}", ")", "logging", ".", "info", "(", "'Tags:'", ")", "for", "tag_name", "in", "tags", ".", "keys", "(", ")", ":", "tag", "=", "{", "}", "tag", "[", "'Key'", "]", "=", "tag_name", "tag", "[", "'Value'", "]", "=", "tags", "[", "tag_name", "]", "self", ".", "_tags", ".", "append", "(", "tag", ")", "logging", ".", "info", "(", "'{} = {}'", ".", "format", "(", "tag_name", ",", "tags", "[", "tag_name", "]", ")", ")", "logging", ".", "debug", "(", "json", ".", "dumps", "(", "self", ".", "_tags", ",", "indent", "=", "2", ",", "sort_keys", "=", "True", ")", ")", "return", "True" ]
Fill in the _tags dict from the tags file. Args: None Returns: True Todo: Figure what could go wrong and at least acknowledge the the fact that Murphy was an optimist.
[ "Fill", "in", "the", "_tags", "dict", "from", "the", "tags", "file", "." ]
b1696f02661134d31b99b4dea7c0d21d09482d33
https://github.com/muckamuck/stackility/blob/b1696f02661134d31b99b4dea7c0d21d09482d33/stackility/CloudStackUtility.py#L500-L528
train
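Separated from the logging, the transformation in _read_tags() reduces to a one-liner, for example:

def to_tag_list(tags):
    # {'team': 'platform', 'env': 'dev'} ->
    # [{'Key': 'team', 'Value': 'platform'}, {'Key': 'env', 'Value': 'dev'}]
    return [{'Key': name, 'Value': value} for name, value in tags.items()]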
muckamuck/stackility
stackility/CloudStackUtility.py
CloudStackUtility._set_update
def _set_update(self): """ Determine if we are creating a new stack or updating and existing one. The update member is set as you would expect at the end of this query. Args: None Returns: True """ try: self._updateStack = False stack_name = self._config.get('environment', {}).get('stack_name', None) response = self._cloudFormation.describe_stacks(StackName=stack_name) stack = response['Stacks'][0] if stack['StackStatus'] == 'ROLLBACK_COMPLETE': logging.info('stack is in ROLLBACK_COMPLETE status and should be deleted') del_stack_resp = self._cloudFormation.delete_stack(StackName=stack_name) logging.info('delete started for stack: {}'.format(stack_name)) logging.debug('delete_stack returned: {}'.format(json.dumps(del_stack_resp, indent=4))) stack_delete = self.poll_stack() if not stack_delete: return False if stack['StackStatus'] in ['CREATE_COMPLETE', 'UPDATE_COMPLETE', 'UPDATE_ROLLBACK_COMPLETE']: self._updateStack = True except: self._updateStack = False logging.info('update_stack: ' + str(self._updateStack)) return True
python
def _set_update(self): """ Determine if we are creating a new stack or updating and existing one. The update member is set as you would expect at the end of this query. Args: None Returns: True """ try: self._updateStack = False stack_name = self._config.get('environment', {}).get('stack_name', None) response = self._cloudFormation.describe_stacks(StackName=stack_name) stack = response['Stacks'][0] if stack['StackStatus'] == 'ROLLBACK_COMPLETE': logging.info('stack is in ROLLBACK_COMPLETE status and should be deleted') del_stack_resp = self._cloudFormation.delete_stack(StackName=stack_name) logging.info('delete started for stack: {}'.format(stack_name)) logging.debug('delete_stack returned: {}'.format(json.dumps(del_stack_resp, indent=4))) stack_delete = self.poll_stack() if not stack_delete: return False if stack['StackStatus'] in ['CREATE_COMPLETE', 'UPDATE_COMPLETE', 'UPDATE_ROLLBACK_COMPLETE']: self._updateStack = True except: self._updateStack = False logging.info('update_stack: ' + str(self._updateStack)) return True
[ "def", "_set_update", "(", "self", ")", ":", "try", ":", "self", ".", "_updateStack", "=", "False", "stack_name", "=", "self", ".", "_config", ".", "get", "(", "'environment'", ",", "{", "}", ")", ".", "get", "(", "'stack_name'", ",", "None", ")", "response", "=", "self", ".", "_cloudFormation", ".", "describe_stacks", "(", "StackName", "=", "stack_name", ")", "stack", "=", "response", "[", "'Stacks'", "]", "[", "0", "]", "if", "stack", "[", "'StackStatus'", "]", "==", "'ROLLBACK_COMPLETE'", ":", "logging", ".", "info", "(", "'stack is in ROLLBACK_COMPLETE status and should be deleted'", ")", "del_stack_resp", "=", "self", ".", "_cloudFormation", ".", "delete_stack", "(", "StackName", "=", "stack_name", ")", "logging", ".", "info", "(", "'delete started for stack: {}'", ".", "format", "(", "stack_name", ")", ")", "logging", ".", "debug", "(", "'delete_stack returned: {}'", ".", "format", "(", "json", ".", "dumps", "(", "del_stack_resp", ",", "indent", "=", "4", ")", ")", ")", "stack_delete", "=", "self", ".", "poll_stack", "(", ")", "if", "not", "stack_delete", ":", "return", "False", "if", "stack", "[", "'StackStatus'", "]", "in", "[", "'CREATE_COMPLETE'", ",", "'UPDATE_COMPLETE'", ",", "'UPDATE_ROLLBACK_COMPLETE'", "]", ":", "self", ".", "_updateStack", "=", "True", "except", ":", "self", ".", "_updateStack", "=", "False", "logging", ".", "info", "(", "'update_stack: '", "+", "str", "(", "self", ".", "_updateStack", ")", ")", "return", "True" ]
Determine if we are creating a new stack or updating and existing one. The update member is set as you would expect at the end of this query. Args: None Returns: True
[ "Determine", "if", "we", "are", "creating", "a", "new", "stack", "or", "updating", "and", "existing", "one", ".", "The", "update", "member", "is", "set", "as", "you", "would", "expect", "at", "the", "end", "of", "this", "query", "." ]
b1696f02661134d31b99b4dea7c0d21d09482d33
https://github.com/muckamuck/stackility/blob/b1696f02661134d31b99b4dea7c0d21d09482d33/stackility/CloudStackUtility.py#L530-L561
train
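A sketch of the core question _set_update() answers, with the ROLLBACK_COMPLETE clean-up path left out: does the stack already exist in one of the states that allows an update? describe_stacks raises a ClientError for a missing stack, which the method above swallows with a bare except:

from botocore.exceptions import ClientError

def stack_is_updatable(cloudformation, stack_name):
    try:
        stack = cloudformation.describe_stacks(StackName=stack_name)['Stacks'][0]
    except ClientError:
        # Missing stack (or no permission): treat as "create, not update".
        return False
    return stack['StackStatus'] in (
        'CREATE_COMPLETE', 'UPDATE_COMPLETE', 'UPDATE_ROLLBACK_COMPLETE',
    )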
muckamuck/stackility
stackility/CloudStackUtility.py
CloudStackUtility._craft_s3_keys
def _craft_s3_keys(self): """ We are putting stuff into S3, were supplied the bucket. Here we craft the key of the elements we are putting up there in the internet clouds. Args: None Returns: a tuple of teplate file key and property file key """ now = time.gmtime() stub = "templates/{stack_name}/{version}".format( stack_name=self._config.get('environment', {}).get('stack_name', None), version=self._config.get('codeVersion') ) stub = stub + "/" + str(now.tm_year) stub = stub + "/" + str('%02d' % now.tm_mon) stub = stub + "/" + str('%02d' % now.tm_mday) stub = stub + "/" + str('%02d' % now.tm_hour) stub = stub + ":" + str('%02d' % now.tm_min) stub = stub + ":" + str('%02d' % now.tm_sec) if self._yaml: template_key = stub + "/stack.yaml" else: template_key = stub + "/stack.json" property_key = stub + "/stack.properties" return template_key, property_key
python
def _craft_s3_keys(self): """ We are putting stuff into S3, were supplied the bucket. Here we craft the key of the elements we are putting up there in the internet clouds. Args: None Returns: a tuple of teplate file key and property file key """ now = time.gmtime() stub = "templates/{stack_name}/{version}".format( stack_name=self._config.get('environment', {}).get('stack_name', None), version=self._config.get('codeVersion') ) stub = stub + "/" + str(now.tm_year) stub = stub + "/" + str('%02d' % now.tm_mon) stub = stub + "/" + str('%02d' % now.tm_mday) stub = stub + "/" + str('%02d' % now.tm_hour) stub = stub + ":" + str('%02d' % now.tm_min) stub = stub + ":" + str('%02d' % now.tm_sec) if self._yaml: template_key = stub + "/stack.yaml" else: template_key = stub + "/stack.json" property_key = stub + "/stack.properties" return template_key, property_key
[ "def", "_craft_s3_keys", "(", "self", ")", ":", "now", "=", "time", ".", "gmtime", "(", ")", "stub", "=", "\"templates/{stack_name}/{version}\"", ".", "format", "(", "stack_name", "=", "self", ".", "_config", ".", "get", "(", "'environment'", ",", "{", "}", ")", ".", "get", "(", "'stack_name'", ",", "None", ")", ",", "version", "=", "self", ".", "_config", ".", "get", "(", "'codeVersion'", ")", ")", "stub", "=", "stub", "+", "\"/\"", "+", "str", "(", "now", ".", "tm_year", ")", "stub", "=", "stub", "+", "\"/\"", "+", "str", "(", "'%02d'", "%", "now", ".", "tm_mon", ")", "stub", "=", "stub", "+", "\"/\"", "+", "str", "(", "'%02d'", "%", "now", ".", "tm_mday", ")", "stub", "=", "stub", "+", "\"/\"", "+", "str", "(", "'%02d'", "%", "now", ".", "tm_hour", ")", "stub", "=", "stub", "+", "\":\"", "+", "str", "(", "'%02d'", "%", "now", ".", "tm_min", ")", "stub", "=", "stub", "+", "\":\"", "+", "str", "(", "'%02d'", "%", "now", ".", "tm_sec", ")", "if", "self", ".", "_yaml", ":", "template_key", "=", "stub", "+", "\"/stack.yaml\"", "else", ":", "template_key", "=", "stub", "+", "\"/stack.json\"", "property_key", "=", "stub", "+", "\"/stack.properties\"", "return", "template_key", ",", "property_key" ]
We are putting stuff into S3; we were supplied the bucket. Here we craft the key of the elements we are putting up there in the internet clouds. Args: None Returns: a tuple of template file key and property file key
[ "We", "are", "putting", "stuff", "into", "S3", "were", "supplied", "the", "bucket", ".", "Here", "we", "craft", "the", "key", "of", "the", "elements", "we", "are", "putting", "up", "there", "in", "the", "internet", "clouds", "." ]
b1696f02661134d31b99b4dea7c0d21d09482d33
https://github.com/muckamuck/stackility/blob/b1696f02661134d31b99b4dea7c0d21d09482d33/stackility/CloudStackUtility.py#L605-L636
train
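A minimal sketch (not taken from the record) of the key layout that _craft_s3_keys builds, assuming a hypothetical stack_name of 'demo-stack' and codeVersion of '1.0.0' in place of the self._config lookups:

import time

# Hypothetical values standing in for the self._config lookups in the record above.
stack_name = 'demo-stack'
code_version = '1.0.0'

now = time.gmtime()
stub = "templates/{stack_name}/{version}".format(stack_name=stack_name, version=code_version)
stub += "/{0}/{1:02d}/{2:02d}/{3:02d}:{4:02d}:{5:02d}".format(
    now.tm_year, now.tm_mon, now.tm_mday, now.tm_hour, now.tm_min, now.tm_sec)

template_key = stub + "/stack.json"        # stub + "/stack.yaml" when the template is YAML
property_key = stub + "/stack.properties"
print(template_key)   # e.g. templates/demo-stack/1.0.0/2024/05/07/13:45:09/stack.json
print(property_key)   # e.g. templates/demo-stack/1.0.0/2024/05/07/13:45:09/stack.properties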
muckamuck/stackility
stackility/CloudStackUtility.py
CloudStackUtility.poll_stack
def poll_stack(self): """ Spin in a loop while the Cloud Formation process either fails or succeeds Args: None Returns: Good or bad; True or False """ logging.info('polling stack status, POLL_INTERVAL={}'.format(POLL_INTERVAL)) time.sleep(POLL_INTERVAL) completed_states = [ 'CREATE_COMPLETE', 'UPDATE_COMPLETE', 'DELETE_COMPLETE' ] stack_name = self._config.get('environment', {}).get('stack_name', None) while True: try: response = self._cloudFormation.describe_stacks(StackName=stack_name) stack = response['Stacks'][0] current_status = stack['StackStatus'] logging.info('current status of {}: {}'.format(stack_name, current_status)) if current_status.endswith('COMPLETE') or current_status.endswith('FAILED'): if current_status in completed_states: return True else: return False time.sleep(POLL_INTERVAL) except ClientError as wtf: if str(wtf).find('does not exist') == -1: logging.error('Exception caught in wait_for_stack(): {}'.format(wtf)) traceback.print_exc(file=sys.stdout) return False else: logging.info('{} is gone'.format(stack_name)) return True except Exception as wtf: logging.error('Exception caught in wait_for_stack(): {}'.format(wtf)) traceback.print_exc(file=sys.stdout) return False
python
def poll_stack(self): """ Spin in a loop while the Cloud Formation process either fails or succeeds Args: None Returns: Good or bad; True or False """ logging.info('polling stack status, POLL_INTERVAL={}'.format(POLL_INTERVAL)) time.sleep(POLL_INTERVAL) completed_states = [ 'CREATE_COMPLETE', 'UPDATE_COMPLETE', 'DELETE_COMPLETE' ] stack_name = self._config.get('environment', {}).get('stack_name', None) while True: try: response = self._cloudFormation.describe_stacks(StackName=stack_name) stack = response['Stacks'][0] current_status = stack['StackStatus'] logging.info('current status of {}: {}'.format(stack_name, current_status)) if current_status.endswith('COMPLETE') or current_status.endswith('FAILED'): if current_status in completed_states: return True else: return False time.sleep(POLL_INTERVAL) except ClientError as wtf: if str(wtf).find('does not exist') == -1: logging.error('Exception caught in wait_for_stack(): {}'.format(wtf)) traceback.print_exc(file=sys.stdout) return False else: logging.info('{} is gone'.format(stack_name)) return True except Exception as wtf: logging.error('Exception caught in wait_for_stack(): {}'.format(wtf)) traceback.print_exc(file=sys.stdout) return False
[ "def", "poll_stack", "(", "self", ")", ":", "logging", ".", "info", "(", "'polling stack status, POLL_INTERVAL={}'", ".", "format", "(", "POLL_INTERVAL", ")", ")", "time", ".", "sleep", "(", "POLL_INTERVAL", ")", "completed_states", "=", "[", "'CREATE_COMPLETE'", ",", "'UPDATE_COMPLETE'", ",", "'DELETE_COMPLETE'", "]", "stack_name", "=", "self", ".", "_config", ".", "get", "(", "'environment'", ",", "{", "}", ")", ".", "get", "(", "'stack_name'", ",", "None", ")", "while", "True", ":", "try", ":", "response", "=", "self", ".", "_cloudFormation", ".", "describe_stacks", "(", "StackName", "=", "stack_name", ")", "stack", "=", "response", "[", "'Stacks'", "]", "[", "0", "]", "current_status", "=", "stack", "[", "'StackStatus'", "]", "logging", ".", "info", "(", "'current status of {}: {}'", ".", "format", "(", "stack_name", ",", "current_status", ")", ")", "if", "current_status", ".", "endswith", "(", "'COMPLETE'", ")", "or", "current_status", ".", "endswith", "(", "'FAILED'", ")", ":", "if", "current_status", "in", "completed_states", ":", "return", "True", "else", ":", "return", "False", "time", ".", "sleep", "(", "POLL_INTERVAL", ")", "except", "ClientError", "as", "wtf", ":", "if", "str", "(", "wtf", ")", ".", "find", "(", "'does not exist'", ")", "==", "-", "1", ":", "logging", ".", "error", "(", "'Exception caught in wait_for_stack(): {}'", ".", "format", "(", "wtf", ")", ")", "traceback", ".", "print_exc", "(", "file", "=", "sys", ".", "stdout", ")", "return", "False", "else", ":", "logging", ".", "info", "(", "'{} is gone'", ".", "format", "(", "stack_name", ")", ")", "return", "True", "except", "Exception", "as", "wtf", ":", "logging", ".", "error", "(", "'Exception caught in wait_for_stack(): {}'", ".", "format", "(", "wtf", ")", ")", "traceback", ".", "print_exc", "(", "file", "=", "sys", ".", "stdout", ")", "return", "False" ]
Spin in a loop while the Cloud Formation process either fails or succeeds Args: None Returns: Good or bad; True or False
[ "Spin", "in", "a", "loop", "while", "the", "Cloud", "Formation", "process", "either", "fails", "or", "succeeds" ]
b1696f02661134d31b99b4dea7c0d21d09482d33
https://github.com/muckamuck/stackility/blob/b1696f02661134d31b99b4dea7c0d21d09482d33/stackility/CloudStackUtility.py#L638-L680
train
theno/fabsetup
fabsetup/fabfile-data/presetting-fabsetup-custom/fabfile_/__init__.py
setup_desktop
def setup_desktop(): '''Run setup tasks to set up a nicely configured desktop pc. This is highly biased on my personal preference. The task is defined in file fabsetup_custom/fabfile_addtitions/__init__.py and could be customized by Your own needs. More info: README.md ''' run('sudo apt-get update') install_packages(packages_desktop) execute(custom.latex) execute(setup.ripping_of_cds) execute(setup.regex_repl) execute(setup.i3) execute(setup.solarized) execute(setup.vim) execute(setup.tmux) execute(setup.pyenv) # circumvent circular import, cf. http://stackoverflow.com/a/18486863 from fabfile import dfh, check_reboot dfh() check_reboot()
python
def setup_desktop(): '''Run setup tasks to set up a nicely configured desktop pc. This is highly biased on my personal preference. The task is defined in file fabsetup_custom/fabfile_addtitions/__init__.py and could be customized by Your own needs. More info: README.md ''' run('sudo apt-get update') install_packages(packages_desktop) execute(custom.latex) execute(setup.ripping_of_cds) execute(setup.regex_repl) execute(setup.i3) execute(setup.solarized) execute(setup.vim) execute(setup.tmux) execute(setup.pyenv) # circumvent circular import, cf. http://stackoverflow.com/a/18486863 from fabfile import dfh, check_reboot dfh() check_reboot()
[ "def", "setup_desktop", "(", ")", ":", "run", "(", "'sudo apt-get update'", ")", "install_packages", "(", "packages_desktop", ")", "execute", "(", "custom", ".", "latex", ")", "execute", "(", "setup", ".", "ripping_of_cds", ")", "execute", "(", "setup", ".", "regex_repl", ")", "execute", "(", "setup", ".", "i3", ")", "execute", "(", "setup", ".", "solarized", ")", "execute", "(", "setup", ".", "vim", ")", "execute", "(", "setup", ".", "tmux", ")", "execute", "(", "setup", ".", "pyenv", ")", "from", "fabfile", "import", "dfh", ",", "check_reboot", "dfh", "(", ")", "check_reboot", "(", ")" ]
Run setup tasks to set up a nicely configured desktop PC. This is highly biased toward my personal preferences. The task is defined in the file fabsetup_custom/fabfile_addtitions/__init__.py and can be customized to your own needs. More info: README.md
[ "Run", "setup", "tasks", "to", "set", "up", "a", "nicely", "configured", "desktop", "pc", "." ]
ced728abff93551ba5677e63bc1bdc0ef5ca5777
https://github.com/theno/fabsetup/blob/ced728abff93551ba5677e63bc1bdc0ef5ca5777/fabsetup/fabfile-data/presetting-fabsetup-custom/fabfile_/__init__.py#L52-L73
train
theno/fabsetup
fabsetup/fabfile-data/presetting-fabsetup-custom/fabfile_/__init__.py
setup_webserver
def setup_webserver(): '''Run setup tasks to set up a nicely configured webserver. Features: * owncloud service * fdroid repository * certificates via letsencrypt * and more The task is defined in file fabsetup_custom/fabfile_addtitions/__init__.py and could be customized by Your own needs. More info: README.md ''' run('sudo apt-get update') install_packages(packages_webserver) execute(custom.latex) execute(setup.solarized) execute(setup.vim) execute(setup.tmux) checkup_git_repo_legacy(url='[email protected]:letsencrypt/letsencrypt.git') execute(setup.service.fdroid) execute(setup.service.owncloud) # circumvent circular import, cf. http://stackoverflow.com/a/18486863 from fabfile import dfh, check_reboot dfh() check_reboot()
python
def setup_webserver(): '''Run setup tasks to set up a nicely configured webserver. Features: * owncloud service * fdroid repository * certificates via letsencrypt * and more The task is defined in file fabsetup_custom/fabfile_addtitions/__init__.py and could be customized by Your own needs. More info: README.md ''' run('sudo apt-get update') install_packages(packages_webserver) execute(custom.latex) execute(setup.solarized) execute(setup.vim) execute(setup.tmux) checkup_git_repo_legacy(url='[email protected]:letsencrypt/letsencrypt.git') execute(setup.service.fdroid) execute(setup.service.owncloud) # circumvent circular import, cf. http://stackoverflow.com/a/18486863 from fabfile import dfh, check_reboot dfh() check_reboot()
[ "def", "setup_webserver", "(", ")", ":", "run", "(", "'sudo apt-get update'", ")", "install_packages", "(", "packages_webserver", ")", "execute", "(", "custom", ".", "latex", ")", "execute", "(", "setup", ".", "solarized", ")", "execute", "(", "setup", ".", "vim", ")", "execute", "(", "setup", ".", "tmux", ")", "checkup_git_repo_legacy", "(", "url", "=", "'[email protected]:letsencrypt/letsencrypt.git'", ")", "execute", "(", "setup", ".", "service", ".", "fdroid", ")", "execute", "(", "setup", ".", "service", ".", "owncloud", ")", "from", "fabfile", "import", "dfh", ",", "check_reboot", "dfh", "(", ")", "check_reboot", "(", ")" ]
Run setup tasks to set up a nicely configured webserver. Features: * owncloud service * fdroid repository * certificates via letsencrypt * and more The task is defined in the file fabsetup_custom/fabfile_addtitions/__init__.py and can be customized to your own needs. More info: README.md
[ "Run", "setup", "tasks", "to", "set", "up", "a", "nicely", "configured", "webserver", "." ]
ced728abff93551ba5677e63bc1bdc0ef5ca5777
https://github.com/theno/fabsetup/blob/ced728abff93551ba5677e63bc1bdc0ef5ca5777/fabsetup/fabfile-data/presetting-fabsetup-custom/fabfile_/__init__.py#L77-L101
train
John-Lin/snortunsock
snortunsock/snort_listener.py
start_recv
def start_recv(sockfile=None): '''Open a server on Unix Domain Socket''' if sockfile is not None: SOCKFILE = sockfile else: # default sockfile SOCKFILE = "/tmp/snort_alert" if os.path.exists(SOCKFILE): os.unlink(SOCKFILE) unsock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) unsock.bind(SOCKFILE) logging.warning('Unix socket start listening...') while True: data = unsock.recv(BUFSIZE) parsed_msg = alert.AlertPkt.parser(data) if parsed_msg: yield parsed_msg
python
def start_recv(sockfile=None): '''Open a server on Unix Domain Socket''' if sockfile is not None: SOCKFILE = sockfile else: # default sockfile SOCKFILE = "/tmp/snort_alert" if os.path.exists(SOCKFILE): os.unlink(SOCKFILE) unsock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) unsock.bind(SOCKFILE) logging.warning('Unix socket start listening...') while True: data = unsock.recv(BUFSIZE) parsed_msg = alert.AlertPkt.parser(data) if parsed_msg: yield parsed_msg
[ "def", "start_recv", "(", "sockfile", "=", "None", ")", ":", "if", "sockfile", "is", "not", "None", ":", "SOCKFILE", "=", "sockfile", "else", ":", "SOCKFILE", "=", "\"/tmp/snort_alert\"", "if", "os", ".", "path", ".", "exists", "(", "SOCKFILE", ")", ":", "os", ".", "unlink", "(", "SOCKFILE", ")", "unsock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_UNIX", ",", "socket", ".", "SOCK_DGRAM", ")", "unsock", ".", "bind", "(", "SOCKFILE", ")", "logging", ".", "warning", "(", "'Unix socket start listening...'", ")", "while", "True", ":", "data", "=", "unsock", ".", "recv", "(", "BUFSIZE", ")", "parsed_msg", "=", "alert", ".", "AlertPkt", ".", "parser", "(", "data", ")", "if", "parsed_msg", ":", "yield", "parsed_msg" ]
Open a server on Unix Domain Socket
[ "Open", "a", "server", "on", "Unix", "Domain", "Socket" ]
f0eb540d76c02b59e3899a16acafada79754dc3e
https://github.com/John-Lin/snortunsock/blob/f0eb540d76c02b59e3899a16acafada79754dc3e/snortunsock/snort_listener.py#L11-L29
train
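A hedged usage sketch for the start_recv generator above: the socket path is the default shown in the record, and the loop simply prints whatever parsed alerts Snort writes to that socket.

from snortunsock import snort_listener

# start_recv() binds a Unix datagram socket and yields parsed Snort alerts
# indefinitely; it only produces output while Snort is writing to the socket.
for alert_pkt in snort_listener.start_recv(sockfile='/tmp/snort_alert'):
    print(alert_pkt)   # inspect or forward the parsed alert here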
bionikspoon/pureyaml
pureyaml/__init__.py
dump
def dump(obj, fp=None, indent=None, sort_keys=False, **kw): """ Dump object to a file like object or string. :param obj: :param fp: Open file like object :param int indent: Indent size, default 2 :param bool sort_keys: Optionally sort dictionary keys. :return: Yaml serialized data. """ if fp: iterable = YAMLEncoder(indent=indent, sort_keys=sort_keys, **kw).iterencode(obj) for chunk in iterable: fp.write(chunk) else: return dumps(obj, indent=indent, sort_keys=sort_keys, **kw)
python
def dump(obj, fp=None, indent=None, sort_keys=False, **kw): """ Dump object to a file like object or string. :param obj: :param fp: Open file like object :param int indent: Indent size, default 2 :param bool sort_keys: Optionally sort dictionary keys. :return: Yaml serialized data. """ if fp: iterable = YAMLEncoder(indent=indent, sort_keys=sort_keys, **kw).iterencode(obj) for chunk in iterable: fp.write(chunk) else: return dumps(obj, indent=indent, sort_keys=sort_keys, **kw)
[ "def", "dump", "(", "obj", ",", "fp", "=", "None", ",", "indent", "=", "None", ",", "sort_keys", "=", "False", ",", "**", "kw", ")", ":", "if", "fp", ":", "iterable", "=", "YAMLEncoder", "(", "indent", "=", "indent", ",", "sort_keys", "=", "sort_keys", ",", "**", "kw", ")", ".", "iterencode", "(", "obj", ")", "for", "chunk", "in", "iterable", ":", "fp", ".", "write", "(", "chunk", ")", "else", ":", "return", "dumps", "(", "obj", ",", "indent", "=", "indent", ",", "sort_keys", "=", "sort_keys", ",", "**", "kw", ")" ]
Dump object to a file like object or string. :param obj: :param fp: Open file like object :param int indent: Indent size, default 2 :param bool sort_keys: Optionally sort dictionary keys. :return: Yaml serialized data.
[ "Dump", "object", "to", "a", "file", "like", "object", "or", "string", "." ]
784830b907ca14525c4cecdb6ae35306f6f8a877
https://github.com/bionikspoon/pureyaml/blob/784830b907ca14525c4cecdb6ae35306f6f8a877/pureyaml/__init__.py#L28-L44
train
bionikspoon/pureyaml
pureyaml/__init__.py
dumps
def dumps(obj, indent=None, default=None, sort_keys=False, **kw): """Dump string.""" return YAMLEncoder(indent=indent, default=default, sort_keys=sort_keys, **kw).encode(obj)
python
def dumps(obj, indent=None, default=None, sort_keys=False, **kw): """Dump string.""" return YAMLEncoder(indent=indent, default=default, sort_keys=sort_keys, **kw).encode(obj)
[ "def", "dumps", "(", "obj", ",", "indent", "=", "None", ",", "default", "=", "None", ",", "sort_keys", "=", "False", ",", "**", "kw", ")", ":", "return", "YAMLEncoder", "(", "indent", "=", "indent", ",", "default", "=", "default", ",", "sort_keys", "=", "sort_keys", ",", "**", "kw", ")", ".", "encode", "(", "obj", ")" ]
Dump string.
[ "Dump", "string", "." ]
784830b907ca14525c4cecdb6ae35306f6f8a877
https://github.com/bionikspoon/pureyaml/blob/784830b907ca14525c4cecdb6ae35306f6f8a877/pureyaml/__init__.py#L47-L49
train
bionikspoon/pureyaml
pureyaml/__init__.py
load
def load(s, **kwargs): """Load yaml file""" try: return loads(s, **kwargs) except TypeError: return loads(s.read(), **kwargs)
python
def load(s, **kwargs): """Load yaml file""" try: return loads(s, **kwargs) except TypeError: return loads(s.read(), **kwargs)
[ "def", "load", "(", "s", ",", "**", "kwargs", ")", ":", "try", ":", "return", "loads", "(", "s", ",", "**", "kwargs", ")", "except", "TypeError", ":", "return", "loads", "(", "s", ".", "read", "(", ")", ",", "**", "kwargs", ")" ]
Load yaml file
[ "Load", "yaml", "file" ]
784830b907ca14525c4cecdb6ae35306f6f8a877
https://github.com/bionikspoon/pureyaml/blob/784830b907ca14525c4cecdb6ae35306f6f8a877/pureyaml/__init__.py#L52-L57
train
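A small usage sketch tying together the dump, dumps and load helpers from the three pureyaml records above; the sample data and file name are assumptions, not part of the records.

import pureyaml

data = {'name': 'example', 'tags': ['a', 'b']}   # assumed sample payload

text = pureyaml.dumps(data, indent=2)            # serialize to a YAML string
print(text)

with open('example.yaml', 'w') as fp:            # stream the encoder output to a file
    pureyaml.dump(data, fp, indent=2)

with open('example.yaml') as fp:                 # load() accepts a string or a file-like object
    print(pureyaml.load(fp))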
jaraco/jaraco.windows
jaraco/windows/api/inet.py
MIB_IPADDRROW.address
def address(self): "The address in big-endian" _ = struct.pack('L', self.address_num) return struct.unpack('!L', _)[0]
python
def address(self): "The address in big-endian" _ = struct.pack('L', self.address_num) return struct.unpack('!L', _)[0]
[ "def", "address", "(", "self", ")", ":", "\"The address in big-endian\"", "_", "=", "struct", ".", "pack", "(", "'L'", ",", "self", ".", "address_num", ")", "return", "struct", ".", "unpack", "(", "'!L'", ",", "_", ")", "[", "0", "]" ]
The address in big-endian
[ "The", "address", "in", "big", "-", "endian" ]
51811efed50b46ad08daa25408a1cc806bc8d519
https://github.com/jaraco/jaraco.windows/blob/51811efed50b46ad08daa25408a1cc806bc8d519/jaraco/windows/api/inet.py#L81-L84
train
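The address property above converts a 32-bit value from host to network byte order by packing and re-unpacking it with struct. Below is a self-contained sketch of the same trick; using the explicit 4-byte format codes '<L' and '!L' is my own assumption for portability, since the record itself uses the native 'L'.

import struct

# 127.0.0.1 stored as a little-endian (host-order) 32-bit integer.
host_order = 0x0100007F
packed = struct.pack('<L', host_order)            # 4 bytes: 7f 00 00 01
network_order = struct.unpack('!L', packed)[0]    # reinterpret as big-endian
print(hex(network_order))                         # 0x7f000001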
NearHuscarl/py-currency
currency/currency.py
validate_currency
def validate_currency(*currencies): """ some validation checks before doing anything """ validated_currency = [] if not currencies: raise CurrencyException('My function need something to run, duh') for currency in currencies: currency = currency.upper() if not isinstance(currency, str): raise TypeError('Currency code should be a string: ' + repr(currency)) if currency not in _currencies: raise CurrencyException('Currency code not found: ' + repr(currency)) validated_currency.append(currency) return validated_currency[0] if len(validated_currency) == 1 else validated_currency
python
def validate_currency(*currencies): """ some validation checks before doing anything """ validated_currency = [] if not currencies: raise CurrencyException('My function need something to run, duh') for currency in currencies: currency = currency.upper() if not isinstance(currency, str): raise TypeError('Currency code should be a string: ' + repr(currency)) if currency not in _currencies: raise CurrencyException('Currency code not found: ' + repr(currency)) validated_currency.append(currency) return validated_currency[0] if len(validated_currency) == 1 else validated_currency
[ "def", "validate_currency", "(", "*", "currencies", ")", ":", "validated_currency", "=", "[", "]", "if", "not", "currencies", ":", "raise", "CurrencyException", "(", "'My function need something to run, duh'", ")", "for", "currency", "in", "currencies", ":", "currency", "=", "currency", ".", "upper", "(", ")", "if", "not", "isinstance", "(", "currency", ",", "str", ")", ":", "raise", "TypeError", "(", "'Currency code should be a string: '", "+", "repr", "(", "currency", ")", ")", "if", "currency", "not", "in", "_currencies", ":", "raise", "CurrencyException", "(", "'Currency code not found: '", "+", "repr", "(", "currency", ")", ")", "validated_currency", ".", "append", "(", "currency", ")", "return", "validated_currency", "[", "0", "]", "if", "len", "(", "validated_currency", ")", "==", "1", "else", "validated_currency" ]
some validation checks before doing anything
[ "some", "validation", "checks", "before", "doing", "anything" ]
4e30426399872fd6bfaa4c752a91d67c2d7bf52c
https://github.com/NearHuscarl/py-currency/blob/4e30426399872fd6bfaa4c752a91d67c2d7bf52c/currency/currency.py#L35-L47
train
NearHuscarl/py-currency
currency/currency.py
validate_price
def validate_price(price): """ validation checks for price argument """ if isinstance(price, str): try: price = int(price) except ValueError: # fallback if convert to int failed price = float(price) if not isinstance(price, (int, float)): raise TypeError('Price should be a number: ' + repr(price)) return price
python
def validate_price(price): """ validation checks for price argument """ if isinstance(price, str): try: price = int(price) except ValueError: # fallback if convert to int failed price = float(price) if not isinstance(price, (int, float)): raise TypeError('Price should be a number: ' + repr(price)) return price
[ "def", "validate_price", "(", "price", ")", ":", "if", "isinstance", "(", "price", ",", "str", ")", ":", "try", ":", "price", "=", "int", "(", "price", ")", "except", "ValueError", ":", "price", "=", "float", "(", "price", ")", "if", "not", "isinstance", "(", "price", ",", "(", "int", ",", "float", ")", ")", ":", "raise", "TypeError", "(", "'Price should be a number: '", "+", "repr", "(", "price", ")", ")", "return", "price" ]
validation checks for price argument
[ "validation", "checks", "for", "price", "argument" ]
4e30426399872fd6bfaa4c752a91d67c2d7bf52c
https://github.com/NearHuscarl/py-currency/blob/4e30426399872fd6bfaa4c752a91d67c2d7bf52c/currency/currency.py#L49-L58
train
NearHuscarl/py-currency
currency/currency.py
name
def name(currency, *, plural=False): """ return name of currency """ currency = validate_currency(currency) if plural: return _currencies[currency]['name_plural'] return _currencies[currency]['name']
python
def name(currency, *, plural=False): """ return name of currency """ currency = validate_currency(currency) if plural: return _currencies[currency]['name_plural'] return _currencies[currency]['name']
[ "def", "name", "(", "currency", ",", "*", ",", "plural", "=", "False", ")", ":", "currency", "=", "validate_currency", "(", "currency", ")", "if", "plural", ":", "return", "_currencies", "[", "currency", "]", "[", "'name_plural'", "]", "return", "_currencies", "[", "currency", "]", "[", "'name'", "]" ]
return name of currency
[ "return", "name", "of", "currency" ]
4e30426399872fd6bfaa4c752a91d67c2d7bf52c
https://github.com/NearHuscarl/py-currency/blob/4e30426399872fd6bfaa4c752a91d67c2d7bf52c/currency/currency.py#L70-L75
train
NearHuscarl/py-currency
currency/currency.py
symbol
def symbol(currency, *, native=True): """ return symbol of currency """ currency = validate_currency(currency) if native: return _currencies[currency]['symbol_native'] return _currencies[currency]['symbol']
python
def symbol(currency, *, native=True): """ return symbol of currency """ currency = validate_currency(currency) if native: return _currencies[currency]['symbol_native'] return _currencies[currency]['symbol']
[ "def", "symbol", "(", "currency", ",", "*", ",", "native", "=", "True", ")", ":", "currency", "=", "validate_currency", "(", "currency", ")", "if", "native", ":", "return", "_currencies", "[", "currency", "]", "[", "'symbol_native'", "]", "return", "_currencies", "[", "currency", "]", "[", "'symbol'", "]" ]
return symbol of currency
[ "return", "symbol", "of", "currency" ]
4e30426399872fd6bfaa4c752a91d67c2d7bf52c
https://github.com/NearHuscarl/py-currency/blob/4e30426399872fd6bfaa4c752a91d67c2d7bf52c/currency/currency.py#L77-L82
train
NearHuscarl/py-currency
currency/currency.py
rounding
def rounding(price, currency): """ rounding currency value based on its max decimal digits """ currency = validate_currency(currency) price = validate_price(price) if decimals(currency) == 0: return round(int(price), decimals(currency)) return round(price, decimals(currency))
python
def rounding(price, currency): """ rounding currency value based on its max decimal digits """ currency = validate_currency(currency) price = validate_price(price) if decimals(currency) == 0: return round(int(price), decimals(currency)) return round(price, decimals(currency))
[ "def", "rounding", "(", "price", ",", "currency", ")", ":", "currency", "=", "validate_currency", "(", "currency", ")", "price", "=", "validate_price", "(", "price", ")", "if", "decimals", "(", "currency", ")", "==", "0", ":", "return", "round", "(", "int", "(", "price", ")", ",", "decimals", "(", "currency", ")", ")", "return", "round", "(", "price", ",", "decimals", "(", "currency", ")", ")" ]
rounding currency value based on its max decimal digits
[ "rounding", "currency", "value", "based", "on", "its", "max", "decimal", "digits" ]
4e30426399872fd6bfaa4c752a91d67c2d7bf52c
https://github.com/NearHuscarl/py-currency/blob/4e30426399872fd6bfaa4c752a91d67c2d7bf52c/currency/currency.py#L94-L100
train
NearHuscarl/py-currency
currency/currency.py
check_update
def check_update(from_currency, to_currency): """ check if last update is over 30 mins ago. if so return True to update, else False """ if from_currency not in ccache: # if currency never get converted before ccache[from_currency] = {} if ccache[from_currency].get(to_currency) is None: ccache[from_currency][to_currency] = {'last_update': 0} last_update = float(ccache[from_currency][to_currency]['last_update']) if time.time() - last_update >= 30 * 60: # if last update is more than 30 min ago return True return False
python
def check_update(from_currency, to_currency): """ check if last update is over 30 mins ago. if so return True to update, else False """ if from_currency not in ccache: # if currency never get converted before ccache[from_currency] = {} if ccache[from_currency].get(to_currency) is None: ccache[from_currency][to_currency] = {'last_update': 0} last_update = float(ccache[from_currency][to_currency]['last_update']) if time.time() - last_update >= 30 * 60: # if last update is more than 30 min ago return True return False
[ "def", "check_update", "(", "from_currency", ",", "to_currency", ")", ":", "if", "from_currency", "not", "in", "ccache", ":", "ccache", "[", "from_currency", "]", "=", "{", "}", "if", "ccache", "[", "from_currency", "]", ".", "get", "(", "to_currency", ")", "is", "None", ":", "ccache", "[", "from_currency", "]", "[", "to_currency", "]", "=", "{", "'last_update'", ":", "0", "}", "last_update", "=", "float", "(", "ccache", "[", "from_currency", "]", "[", "to_currency", "]", "[", "'last_update'", "]", ")", "if", "time", ".", "time", "(", ")", "-", "last_update", ">=", "30", "*", "60", ":", "return", "True", "return", "False" ]
Check if the last update is over 30 mins ago; if so, return True to update, else False
[ "check", "if", "last", "update", "is", "over", "30", "mins", "ago", ".", "if", "so", "return", "True", "to", "update", "else", "False" ]
4e30426399872fd6bfaa4c752a91d67c2d7bf52c
https://github.com/NearHuscarl/py-currency/blob/4e30426399872fd6bfaa4c752a91d67c2d7bf52c/currency/currency.py#L133-L142
train
NearHuscarl/py-currency
currency/currency.py
update_cache
def update_cache(from_currency, to_currency): """ update from_currency to_currency pair in cache if last update for that pair is over 30 minutes ago by request API info """ if check_update(from_currency, to_currency) is True: ccache[from_currency][to_currency]['value'] = convert_using_api(from_currency, to_currency) ccache[from_currency][to_currency]['last_update'] = time.time() cache.write(ccache)
python
def update_cache(from_currency, to_currency): """ update from_currency to_currency pair in cache if last update for that pair is over 30 minutes ago by request API info """ if check_update(from_currency, to_currency) is True: ccache[from_currency][to_currency]['value'] = convert_using_api(from_currency, to_currency) ccache[from_currency][to_currency]['last_update'] = time.time() cache.write(ccache)
[ "def", "update_cache", "(", "from_currency", ",", "to_currency", ")", ":", "if", "check_update", "(", "from_currency", ",", "to_currency", ")", "is", "True", ":", "ccache", "[", "from_currency", "]", "[", "to_currency", "]", "[", "'value'", "]", "=", "convert_using_api", "(", "from_currency", ",", "to_currency", ")", "ccache", "[", "from_currency", "]", "[", "to_currency", "]", "[", "'last_update'", "]", "=", "time", ".", "time", "(", ")", "cache", ".", "write", "(", "ccache", ")" ]
Update the from_currency to_currency pair in the cache, by requesting API info, if the last update for that pair is over 30 minutes ago
[ "update", "from_currency", "to_currency", "pair", "in", "cache", "if", "last", "update", "for", "that", "pair", "is", "over", "30", "minutes", "ago", "by", "request", "API", "info" ]
4e30426399872fd6bfaa4c752a91d67c2d7bf52c
https://github.com/NearHuscarl/py-currency/blob/4e30426399872fd6bfaa4c752a91d67c2d7bf52c/currency/currency.py#L144-L150
train
NearHuscarl/py-currency
currency/currency.py
convert_using_api
def convert_using_api(from_currency, to_currency): """ convert from from_currency to to_currency by requesting API """ convert_str = from_currency + '_' + to_currency options = {'compact': 'ultra', 'q': convert_str} api_url = 'https://free.currencyconverterapi.com/api/v5/convert' result = requests.get(api_url, params=options).json() return result[convert_str]
python
def convert_using_api(from_currency, to_currency): """ convert from from_currency to to_currency by requesting API """ convert_str = from_currency + '_' + to_currency options = {'compact': 'ultra', 'q': convert_str} api_url = 'https://free.currencyconverterapi.com/api/v5/convert' result = requests.get(api_url, params=options).json() return result[convert_str]
[ "def", "convert_using_api", "(", "from_currency", ",", "to_currency", ")", ":", "convert_str", "=", "from_currency", "+", "'_'", "+", "to_currency", "options", "=", "{", "'compact'", ":", "'ultra'", ",", "'q'", ":", "convert_str", "}", "api_url", "=", "'https://free.currencyconverterapi.com/api/v5/convert'", "result", "=", "requests", ".", "get", "(", "api_url", ",", "params", "=", "options", ")", ".", "json", "(", ")", "return", "result", "[", "convert_str", "]" ]
convert from from_currency to to_currency by requesting API
[ "convert", "from", "from_currency", "to", "to_currency", "by", "requesting", "API" ]
4e30426399872fd6bfaa4c752a91d67c2d7bf52c
https://github.com/NearHuscarl/py-currency/blob/4e30426399872fd6bfaa4c752a91d67c2d7bf52c/currency/currency.py#L152-L158
train
NearHuscarl/py-currency
currency/currency.py
convert
def convert(from_currency, to_currency, from_currency_price=1): """ convert from from_currency to to_currency using cached info """ get_cache() from_currency, to_currency = validate_currency(from_currency, to_currency) update_cache(from_currency, to_currency) return ccache[from_currency][to_currency]['value'] * from_currency_price
python
def convert(from_currency, to_currency, from_currency_price=1): """ convert from from_currency to to_currency using cached info """ get_cache() from_currency, to_currency = validate_currency(from_currency, to_currency) update_cache(from_currency, to_currency) return ccache[from_currency][to_currency]['value'] * from_currency_price
[ "def", "convert", "(", "from_currency", ",", "to_currency", ",", "from_currency_price", "=", "1", ")", ":", "get_cache", "(", ")", "from_currency", ",", "to_currency", "=", "validate_currency", "(", "from_currency", ",", "to_currency", ")", "update_cache", "(", "from_currency", ",", "to_currency", ")", "return", "ccache", "[", "from_currency", "]", "[", "to_currency", "]", "[", "'value'", "]", "*", "from_currency_price" ]
convert from from_currency to to_currency using cached info
[ "convert", "from", "from_currency", "to", "to_currency", "using", "cached", "info" ]
4e30426399872fd6bfaa4c752a91d67c2d7bf52c
https://github.com/NearHuscarl/py-currency/blob/4e30426399872fd6bfaa4c752a91d67c2d7bf52c/currency/currency.py#L160-L165
train
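A hedged usage sketch for the py-currency helpers shown in the records above. The currency codes are ordinary ISO codes, the printed values depend on the package's bundled currency table, and convert() performs a network request against the free currencyconverterapi endpoint whenever the cached rate is more than 30 minutes old.

from currency import currency

print(currency.name('USD'))                 # display name from the bundled table
print(currency.name('USD', plural=True))    # plural form
print(currency.symbol('USD', native=True))  # native symbol

# convert(from_currency, to_currency, from_currency_price=1)
print(currency.convert('USD', 'EUR', 10))   # 10 USD expressed in EUR
print(currency.rounding(3.14159, 'USD'))    # rounded to the currency's decimal digits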
jaraco/jaraco.windows
jaraco/windows/timers.py
WaitableTimer.wait_for_signal
def wait_for_signal(self, timeout=None): """ wait for the signal; return after the signal has occurred or the timeout in seconds elapses. """ timeout_ms = int(timeout * 1000) if timeout else win32event.INFINITE win32event.WaitForSingleObject(self.signal_event, timeout_ms)
python
def wait_for_signal(self, timeout=None): """ wait for the signal; return after the signal has occurred or the timeout in seconds elapses. """ timeout_ms = int(timeout * 1000) if timeout else win32event.INFINITE win32event.WaitForSingleObject(self.signal_event, timeout_ms)
[ "def", "wait_for_signal", "(", "self", ",", "timeout", "=", "None", ")", ":", "timeout_ms", "=", "int", "(", "timeout", "*", "1000", ")", "if", "timeout", "else", "win32event", ".", "INFINITE", "win32event", ".", "WaitForSingleObject", "(", "self", ".", "signal_event", ",", "timeout_ms", ")" ]
wait for the signal; return after the signal has occurred or the timeout in seconds elapses.
[ "wait", "for", "the", "signal", ";", "return", "after", "the", "signal", "has", "occurred", "or", "the", "timeout", "in", "seconds", "elapses", "." ]
51811efed50b46ad08daa25408a1cc806bc8d519
https://github.com/jaraco/jaraco.windows/blob/51811efed50b46ad08daa25408a1cc806bc8d519/jaraco/windows/timers.py#L36-L42
train
Genida/django-meerkat
src/meerkat/utils/geolocation.py
ip_geoloc
def ip_geoloc(ip, hit_api=True): """ Get IP geolocation. Args: ip (str): IP address to use if no data provided. hit_api (bool): whether to hit api if info not found. Returns: str: latitude and longitude, comma-separated. """ from ..logs.models import IPInfoCheck try: obj = IPInfoCheck.objects.get(ip_address=ip).ip_info except IPInfoCheck.DoesNotExist: if hit_api: try: obj = IPInfoCheck.check_ip(ip) except RateExceededError: return None else: return None return obj.latitude, obj.longitude
python
def ip_geoloc(ip, hit_api=True): """ Get IP geolocation. Args: ip (str): IP address to use if no data provided. hit_api (bool): whether to hit api if info not found. Returns: str: latitude and longitude, comma-separated. """ from ..logs.models import IPInfoCheck try: obj = IPInfoCheck.objects.get(ip_address=ip).ip_info except IPInfoCheck.DoesNotExist: if hit_api: try: obj = IPInfoCheck.check_ip(ip) except RateExceededError: return None else: return None return obj.latitude, obj.longitude
[ "def", "ip_geoloc", "(", "ip", ",", "hit_api", "=", "True", ")", ":", "from", ".", ".", "logs", ".", "models", "import", "IPInfoCheck", "try", ":", "obj", "=", "IPInfoCheck", ".", "objects", ".", "get", "(", "ip_address", "=", "ip", ")", ".", "ip_info", "except", "IPInfoCheck", ".", "DoesNotExist", ":", "if", "hit_api", ":", "try", ":", "obj", "=", "IPInfoCheck", ".", "check_ip", "(", "ip", ")", "except", "RateExceededError", ":", "return", "None", "else", ":", "return", "None", "return", "obj", ".", "latitude", ",", "obj", ".", "longitude" ]
Get IP geolocation. Args: ip (str): IP address to use if no data provided. hit_api (bool): whether to hit api if info not found. Returns: str: latitude and longitude, comma-separated.
[ "Get", "IP", "geolocation", "." ]
486502a75bb0800266db785fd32717d8c0eb8deb
https://github.com/Genida/django-meerkat/blob/486502a75bb0800266db785fd32717d8c0eb8deb/src/meerkat/utils/geolocation.py#L8-L30
train
Genida/django-meerkat
src/meerkat/utils/geolocation.py
google_maps_geoloc_link
def google_maps_geoloc_link(data): """ Get a link to google maps pointing on this IP's geolocation. Args: data (str/tuple): IP address or (latitude, longitude). Returns: str: a link to google maps pointing on this IP's geolocation. """ if isinstance(data, str): lat_lon = ip_geoloc(data) if lat_lon is None: return '' lat, lon = lat_lon else: lat, lon = data loc = '%s,%s' % (lat, lon) return 'https://www.google.com/maps/place/@%s,17z/' \ 'data=!3m1!4b1!4m5!3m4!1s0x0:0x0!8m2!3d%s!4d%s' % ( loc, lat, lon)
python
def google_maps_geoloc_link(data): """ Get a link to google maps pointing on this IP's geolocation. Args: data (str/tuple): IP address or (latitude, longitude). Returns: str: a link to google maps pointing on this IP's geolocation. """ if isinstance(data, str): lat_lon = ip_geoloc(data) if lat_lon is None: return '' lat, lon = lat_lon else: lat, lon = data loc = '%s,%s' % (lat, lon) return 'https://www.google.com/maps/place/@%s,17z/' \ 'data=!3m1!4b1!4m5!3m4!1s0x0:0x0!8m2!3d%s!4d%s' % ( loc, lat, lon)
[ "def", "google_maps_geoloc_link", "(", "data", ")", ":", "if", "isinstance", "(", "data", ",", "str", ")", ":", "lat_lon", "=", "ip_geoloc", "(", "data", ")", "if", "lat_lon", "is", "None", ":", "return", "''", "lat", ",", "lon", "=", "lat_lon", "else", ":", "lat", ",", "lon", "=", "data", "loc", "=", "'%s,%s'", "%", "(", "lat", ",", "lon", ")", "return", "'https://www.google.com/maps/place/@%s,17z/'", "'data=!3m1!4b1!4m5!3m4!1s0x0:0x0!8m2!3d%s!4d%s'", "%", "(", "loc", ",", "lat", ",", "lon", ")" ]
Get a link to google maps pointing on this IP's geolocation. Args: data (str/tuple): IP address or (latitude, longitude). Returns: str: a link to google maps pointing on this IP's geolocation.
[ "Get", "a", "link", "to", "google", "maps", "pointing", "on", "this", "IP", "s", "geolocation", "." ]
486502a75bb0800266db785fd32717d8c0eb8deb
https://github.com/Genida/django-meerkat/blob/486502a75bb0800266db785fd32717d8c0eb8deb/src/meerkat/utils/geolocation.py#L33-L53
train
Genida/django-meerkat
src/meerkat/utils/geolocation.py
open_street_map_geoloc_link
def open_street_map_geoloc_link(data): """ Get a link to open street map pointing on this IP's geolocation. Args: data (str/tuple): IP address or (latitude, longitude). Returns: str: a link to open street map pointing on this IP's geolocation. """ if isinstance(data, str): lat_lon = ip_geoloc(data) if lat_lon is None: return '' lat, lon = lat_lon else: lat, lon = data return 'https://www.openstreetmap.org/search' \ '?query=%s%%2C%s#map=7/%s/%s' % (lat, lon, lat, lon)
python
def open_street_map_geoloc_link(data): """ Get a link to open street map pointing on this IP's geolocation. Args: data (str/tuple): IP address or (latitude, longitude). Returns: str: a link to open street map pointing on this IP's geolocation. """ if isinstance(data, str): lat_lon = ip_geoloc(data) if lat_lon is None: return '' lat, lon = lat_lon else: lat, lon = data return 'https://www.openstreetmap.org/search' \ '?query=%s%%2C%s#map=7/%s/%s' % (lat, lon, lat, lon)
[ "def", "open_street_map_geoloc_link", "(", "data", ")", ":", "if", "isinstance", "(", "data", ",", "str", ")", ":", "lat_lon", "=", "ip_geoloc", "(", "data", ")", "if", "lat_lon", "is", "None", ":", "return", "''", "lat", ",", "lon", "=", "lat_lon", "else", ":", "lat", ",", "lon", "=", "data", "return", "'https://www.openstreetmap.org/search'", "'?query=%s%%2C%s#map=7/%s/%s'", "%", "(", "lat", ",", "lon", ",", "lat", ",", "lon", ")" ]
Get a link to open street map pointing on this IP's geolocation. Args: data (str/tuple): IP address or (latitude, longitude). Returns: str: a link to open street map pointing on this IP's geolocation.
[ "Get", "a", "link", "to", "open", "street", "map", "pointing", "on", "this", "IP", "s", "geolocation", "." ]
486502a75bb0800266db785fd32717d8c0eb8deb
https://github.com/Genida/django-meerkat/blob/486502a75bb0800266db785fd32717d8c0eb8deb/src/meerkat/utils/geolocation.py#L56-L74
train
Genida/django-meerkat
src/meerkat/logs/charts.py
status_codes_chart
def status_codes_chart(): """Chart for status codes.""" stats = status_codes_stats() chart_options = { 'chart': { 'type': 'pie' }, 'title': { 'text': '' }, 'subtitle': { 'text': '' }, 'tooltip': { 'formatter': "return this.y + '/' + this.total + ' (' + " "Highcharts.numberFormat(this.percentage, 1) + '%)';" }, 'legend': { 'enabled': True, }, 'plotOptions': { 'pie': { 'allowPointSelect': True, 'cursor': 'pointer', 'dataLabels': { 'enabled': True, 'format': '<b>{point.name}</b>: {point.y}/{point.total} ' '({point.percentage:.1f}%)' }, 'showInLegend': True } }, 'series': [{ 'name': _('Status Codes'), 'colorByPoint': True, 'data': sorted( [{'name': '%s %s' % (k, STATUS_CODES[int(k)]['name']), 'y': v} for k, v in stats.items()], key=lambda x: x['y'], reverse=True) }] } return chart_options
python
def status_codes_chart(): """Chart for status codes.""" stats = status_codes_stats() chart_options = { 'chart': { 'type': 'pie' }, 'title': { 'text': '' }, 'subtitle': { 'text': '' }, 'tooltip': { 'formatter': "return this.y + '/' + this.total + ' (' + " "Highcharts.numberFormat(this.percentage, 1) + '%)';" }, 'legend': { 'enabled': True, }, 'plotOptions': { 'pie': { 'allowPointSelect': True, 'cursor': 'pointer', 'dataLabels': { 'enabled': True, 'format': '<b>{point.name}</b>: {point.y}/{point.total} ' '({point.percentage:.1f}%)' }, 'showInLegend': True } }, 'series': [{ 'name': _('Status Codes'), 'colorByPoint': True, 'data': sorted( [{'name': '%s %s' % (k, STATUS_CODES[int(k)]['name']), 'y': v} for k, v in stats.items()], key=lambda x: x['y'], reverse=True) }] } return chart_options
[ "def", "status_codes_chart", "(", ")", ":", "stats", "=", "status_codes_stats", "(", ")", "chart_options", "=", "{", "'chart'", ":", "{", "'type'", ":", "'pie'", "}", ",", "'title'", ":", "{", "'text'", ":", "''", "}", ",", "'subtitle'", ":", "{", "'text'", ":", "''", "}", ",", "'tooltip'", ":", "{", "'formatter'", ":", "\"return this.y + '/' + this.total + ' (' + \"", "\"Highcharts.numberFormat(this.percentage, 1) + '%)';\"", "}", ",", "'legend'", ":", "{", "'enabled'", ":", "True", ",", "}", ",", "'plotOptions'", ":", "{", "'pie'", ":", "{", "'allowPointSelect'", ":", "True", ",", "'cursor'", ":", "'pointer'", ",", "'dataLabels'", ":", "{", "'enabled'", ":", "True", ",", "'format'", ":", "'<b>{point.name}</b>: {point.y}/{point.total} '", "'({point.percentage:.1f}%)'", "}", ",", "'showInLegend'", ":", "True", "}", "}", ",", "'series'", ":", "[", "{", "'name'", ":", "_", "(", "'Status Codes'", ")", ",", "'colorByPoint'", ":", "True", ",", "'data'", ":", "sorted", "(", "[", "{", "'name'", ":", "'%s %s'", "%", "(", "k", ",", "STATUS_CODES", "[", "int", "(", "k", ")", "]", "[", "'name'", "]", ")", ",", "'y'", ":", "v", "}", "for", "k", ",", "v", "in", "stats", ".", "items", "(", ")", "]", ",", "key", "=", "lambda", "x", ":", "x", "[", "'y'", "]", ",", "reverse", "=", "True", ")", "}", "]", "}", "return", "chart_options" ]
Chart for status codes.
[ "Chart", "for", "status", "codes", "." ]
486502a75bb0800266db785fd32717d8c0eb8deb
https://github.com/Genida/django-meerkat/blob/486502a75bb0800266db785fd32717d8c0eb8deb/src/meerkat/logs/charts.py#L19-L63
train
Genida/django-meerkat
src/meerkat/logs/charts.py
most_visited_pages_legend_chart
def most_visited_pages_legend_chart(): """Chart for most visited pages legend.""" return { 'chart': { 'type': 'bar', 'height': 200, }, 'title': { 'text': _('Legend') }, 'xAxis': { 'categories': [ _('Project URL'), _('Old project URL'), _('Asset URL'), _('Old asset URL'), _('Common asset URL'), _('False-negative project URL'), _('Suspicious URL (potential attack)') ], 'title': { 'text': None } }, 'yAxis': { 'title': { 'text': None, 'align': 'high' }, 'labels': { 'overflow': 'justify' } }, 'tooltip': { 'enabled': False }, 'legend': { 'enabled': False }, 'credits': { 'enabled': False }, 'series': [{ 'name': _('Legend'), 'data': [ {'color': URL_TYPE_COLOR[PROJECT], 'y': 1}, {'color': URL_TYPE_COLOR[OLD_PROJECT], 'y': 1}, {'color': URL_TYPE_COLOR[ASSET], 'y': 1}, {'color': URL_TYPE_COLOR[OLD_ASSET], 'y': 1}, {'color': URL_TYPE_COLOR[COMMON_ASSET], 'y': 1}, {'color': URL_TYPE_COLOR[FALSE_NEGATIVE], 'y': 1}, {'color': URL_TYPE_COLOR[SUSPICIOUS], 'y': 1}, ] }] }
python
def most_visited_pages_legend_chart(): """Chart for most visited pages legend.""" return { 'chart': { 'type': 'bar', 'height': 200, }, 'title': { 'text': _('Legend') }, 'xAxis': { 'categories': [ _('Project URL'), _('Old project URL'), _('Asset URL'), _('Old asset URL'), _('Common asset URL'), _('False-negative project URL'), _('Suspicious URL (potential attack)') ], 'title': { 'text': None } }, 'yAxis': { 'title': { 'text': None, 'align': 'high' }, 'labels': { 'overflow': 'justify' } }, 'tooltip': { 'enabled': False }, 'legend': { 'enabled': False }, 'credits': { 'enabled': False }, 'series': [{ 'name': _('Legend'), 'data': [ {'color': URL_TYPE_COLOR[PROJECT], 'y': 1}, {'color': URL_TYPE_COLOR[OLD_PROJECT], 'y': 1}, {'color': URL_TYPE_COLOR[ASSET], 'y': 1}, {'color': URL_TYPE_COLOR[OLD_ASSET], 'y': 1}, {'color': URL_TYPE_COLOR[COMMON_ASSET], 'y': 1}, {'color': URL_TYPE_COLOR[FALSE_NEGATIVE], 'y': 1}, {'color': URL_TYPE_COLOR[SUSPICIOUS], 'y': 1}, ] }] }
[ "def", "most_visited_pages_legend_chart", "(", ")", ":", "return", "{", "'chart'", ":", "{", "'type'", ":", "'bar'", ",", "'height'", ":", "200", ",", "}", ",", "'title'", ":", "{", "'text'", ":", "_", "(", "'Legend'", ")", "}", ",", "'xAxis'", ":", "{", "'categories'", ":", "[", "_", "(", "'Project URL'", ")", ",", "_", "(", "'Old project URL'", ")", ",", "_", "(", "'Asset URL'", ")", ",", "_", "(", "'Old asset URL'", ")", ",", "_", "(", "'Common asset URL'", ")", ",", "_", "(", "'False-negative project URL'", ")", ",", "_", "(", "'Suspicious URL (potential attack)'", ")", "]", ",", "'title'", ":", "{", "'text'", ":", "None", "}", "}", ",", "'yAxis'", ":", "{", "'title'", ":", "{", "'text'", ":", "None", ",", "'align'", ":", "'high'", "}", ",", "'labels'", ":", "{", "'overflow'", ":", "'justify'", "}", "}", ",", "'tooltip'", ":", "{", "'enabled'", ":", "False", "}", ",", "'legend'", ":", "{", "'enabled'", ":", "False", "}", ",", "'credits'", ":", "{", "'enabled'", ":", "False", "}", ",", "'series'", ":", "[", "{", "'name'", ":", "_", "(", "'Legend'", ")", ",", "'data'", ":", "[", "{", "'color'", ":", "URL_TYPE_COLOR", "[", "PROJECT", "]", ",", "'y'", ":", "1", "}", ",", "{", "'color'", ":", "URL_TYPE_COLOR", "[", "OLD_PROJECT", "]", ",", "'y'", ":", "1", "}", ",", "{", "'color'", ":", "URL_TYPE_COLOR", "[", "ASSET", "]", ",", "'y'", ":", "1", "}", ",", "{", "'color'", ":", "URL_TYPE_COLOR", "[", "OLD_ASSET", "]", ",", "'y'", ":", "1", "}", ",", "{", "'color'", ":", "URL_TYPE_COLOR", "[", "COMMON_ASSET", "]", ",", "'y'", ":", "1", "}", ",", "{", "'color'", ":", "URL_TYPE_COLOR", "[", "FALSE_NEGATIVE", "]", ",", "'y'", ":", "1", "}", ",", "{", "'color'", ":", "URL_TYPE_COLOR", "[", "SUSPICIOUS", "]", ",", "'y'", ":", "1", "}", ",", "]", "}", "]", "}" ]
Chart for most visited pages legend.
[ "Chart", "for", "most", "visited", "pages", "legend", "." ]
486502a75bb0800266db785fd32717d8c0eb8deb
https://github.com/Genida/django-meerkat/blob/486502a75bb0800266db785fd32717d8c0eb8deb/src/meerkat/logs/charts.py#L100-L154
train
dcramer/logan
logan/settings.py
add_settings
def add_settings(mod, allow_extras=True, settings=django_settings): """ Adds all settings that are part of ``mod`` to the global settings object. Special cases ``EXTRA_APPS`` to append the specified applications to the list of ``INSTALLED_APPS``. """ extras = {} for setting in dir(mod): if setting == setting.upper(): setting_value = getattr(mod, setting) if setting in TUPLE_SETTINGS and type(setting_value) == str: setting_value = (setting_value,) # In case the user forgot the comma. # Any setting that starts with EXTRA_ and matches a setting that is a list or tuple # will automatically append the values to the current setting. # It might make sense to make this less magical if setting.startswith('EXTRA_'): base_setting = setting.split('EXTRA_', 1)[-1] if isinstance(getattr(settings, base_setting), (list, tuple)): extras[base_setting] = setting_value continue setattr(settings, setting, setting_value) for key, value in extras.items(): curval = getattr(settings, key) setattr(settings, key, curval + type(curval)(value))
python
def add_settings(mod, allow_extras=True, settings=django_settings): """ Adds all settings that are part of ``mod`` to the global settings object. Special cases ``EXTRA_APPS`` to append the specified applications to the list of ``INSTALLED_APPS``. """ extras = {} for setting in dir(mod): if setting == setting.upper(): setting_value = getattr(mod, setting) if setting in TUPLE_SETTINGS and type(setting_value) == str: setting_value = (setting_value,) # In case the user forgot the comma. # Any setting that starts with EXTRA_ and matches a setting that is a list or tuple # will automatically append the values to the current setting. # It might make sense to make this less magical if setting.startswith('EXTRA_'): base_setting = setting.split('EXTRA_', 1)[-1] if isinstance(getattr(settings, base_setting), (list, tuple)): extras[base_setting] = setting_value continue setattr(settings, setting, setting_value) for key, value in extras.items(): curval = getattr(settings, key) setattr(settings, key, curval + type(curval)(value))
[ "def", "add_settings", "(", "mod", ",", "allow_extras", "=", "True", ",", "settings", "=", "django_settings", ")", ":", "extras", "=", "{", "}", "for", "setting", "in", "dir", "(", "mod", ")", ":", "if", "setting", "==", "setting", ".", "upper", "(", ")", ":", "setting_value", "=", "getattr", "(", "mod", ",", "setting", ")", "if", "setting", "in", "TUPLE_SETTINGS", "and", "type", "(", "setting_value", ")", "==", "str", ":", "setting_value", "=", "(", "setting_value", ",", ")", "if", "setting", ".", "startswith", "(", "'EXTRA_'", ")", ":", "base_setting", "=", "setting", ".", "split", "(", "'EXTRA_'", ",", "1", ")", "[", "-", "1", "]", "if", "isinstance", "(", "getattr", "(", "settings", ",", "base_setting", ")", ",", "(", "list", ",", "tuple", ")", ")", ":", "extras", "[", "base_setting", "]", "=", "setting_value", "continue", "setattr", "(", "settings", ",", "setting", ",", "setting_value", ")", "for", "key", ",", "value", "in", "extras", ".", "items", "(", ")", ":", "curval", "=", "getattr", "(", "settings", ",", "key", ")", "setattr", "(", "settings", ",", "key", ",", "curval", "+", "type", "(", "curval", ")", "(", "value", ")", ")" ]
Adds all settings that are part of ``mod`` to the global settings object. Special cases ``EXTRA_APPS`` to append the specified applications to the list of ``INSTALLED_APPS``.
[ "Adds", "all", "settings", "that", "are", "part", "of", "mod", "to", "the", "global", "settings", "object", "." ]
8b18456802d631a822e2823bf9a4e9810a15a20e
https://github.com/dcramer/logan/blob/8b18456802d631a822e2823bf9a4e9810a15a20e/logan/settings.py#L73-L101
train
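A self-contained illustration (my own sketch, not logan's code) of the EXTRA_* convention that add_settings implements above; throw-away namespace objects stand in for a Django settings object and a settings module, so nothing here imports logan or Django.

from types import SimpleNamespace

# Stand-ins for django_settings and a settings module.
settings = SimpleNamespace(INSTALLED_APPS=('django.contrib.auth',))
mod = SimpleNamespace(DEBUG=True, EXTRA_INSTALLED_APPS=('myapp',))

# Same idea as add_settings(): upper-case names are copied over, while
# EXTRA_<NAME> values are appended to an existing list/tuple setting.
extras = {}
for name in vars(mod):
    value = getattr(mod, name)
    base = name.split('EXTRA_', 1)[-1]
    if name.startswith('EXTRA_') and isinstance(getattr(settings, base, None), (list, tuple)):
        extras[base] = value
    else:
        setattr(settings, name, value)
for key, value in extras.items():
    current = getattr(settings, key)
    setattr(settings, key, current + type(current)(value))

print(settings.INSTALLED_APPS)   # ('django.contrib.auth', 'myapp')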
Genida/django-meerkat
src/meerkat/sites.py
DashboardSite.get_urls
def get_urls(self): """ Get urls method. Returns: list: the list of url objects. """ urls = super(DashboardSite, self).get_urls() custom_urls = [ url(r'^$', self.admin_view(HomeView.as_view()), name='index'), url(r'^logs/', include(logs_urlpatterns(self.admin_view))), ] custom_urls += get_realtime_urls(self.admin_view) del urls[0] return custom_urls + urls
python
def get_urls(self): """ Get urls method. Returns: list: the list of url objects. """ urls = super(DashboardSite, self).get_urls() custom_urls = [ url(r'^$', self.admin_view(HomeView.as_view()), name='index'), url(r'^logs/', include(logs_urlpatterns(self.admin_view))), ] custom_urls += get_realtime_urls(self.admin_view) del urls[0] return custom_urls + urls
[ "def", "get_urls", "(", "self", ")", ":", "urls", "=", "super", "(", "DashboardSite", ",", "self", ")", ".", "get_urls", "(", ")", "custom_urls", "=", "[", "url", "(", "r'^$'", ",", "self", ".", "admin_view", "(", "HomeView", ".", "as_view", "(", ")", ")", ",", "name", "=", "'index'", ")", ",", "url", "(", "r'^logs/'", ",", "include", "(", "logs_urlpatterns", "(", "self", ".", "admin_view", ")", ")", ")", ",", "]", "custom_urls", "+=", "get_realtime_urls", "(", "self", ".", "admin_view", ")", "del", "urls", "[", "0", "]", "return", "custom_urls", "+", "urls" ]
Get urls method. Returns: list: the list of url objects.
[ "Get", "urls", "method", "." ]
486502a75bb0800266db785fd32717d8c0eb8deb
https://github.com/Genida/django-meerkat/blob/486502a75bb0800266db785fd32717d8c0eb8deb/src/meerkat/sites.py#L23-L41
train
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
add_form_widget_attr
def add_form_widget_attr(field, attr_name, attr_value, replace=0): """ Adds widget attributes to a bound form field. This is helpful if you would like to add a certain class to all your forms (i.e. `form-control` to all form fields when you are using Bootstrap):: {% load libs_tags %} {% for field in form.fields %} {% add_form_widget_attr field 'class' 'form-control' as field_ %} {{ field_ }} {% endfor %} The tag will check if the attr already exists and only append your value. If you would like to replace existing attrs, set `replace=1`:: {% add_form_widget_attr field 'class' 'form-control' replace=1 as field_ %} """ if not replace: attr = field.field.widget.attrs.get(attr_name, '') attr += force_text(attr_value) field.field.widget.attrs[attr_name] = attr return field else: field.field.widget.attrs[attr_name] = attr_value return field
python
def add_form_widget_attr(field, attr_name, attr_value, replace=0): """ Adds widget attributes to a bound form field. This is helpful if you would like to add a certain class to all your forms (i.e. `form-control` to all form fields when you are using Bootstrap):: {% load libs_tags %} {% for field in form.fields %} {% add_form_widget_attr field 'class' 'form-control' as field_ %} {{ field_ }} {% endfor %} The tag will check if the attr already exists and only append your value. If you would like to replace existing attrs, set `replace=1`:: {% add_form_widget_attr field 'class' 'form-control' replace=1 as field_ %} """ if not replace: attr = field.field.widget.attrs.get(attr_name, '') attr += force_text(attr_value) field.field.widget.attrs[attr_name] = attr return field else: field.field.widget.attrs[attr_name] = attr_value return field
[ "def", "add_form_widget_attr", "(", "field", ",", "attr_name", ",", "attr_value", ",", "replace", "=", "0", ")", ":", "if", "not", "replace", ":", "attr", "=", "field", ".", "field", ".", "widget", ".", "attrs", ".", "get", "(", "attr_name", ",", "''", ")", "attr", "+=", "force_text", "(", "attr_value", ")", "field", ".", "field", ".", "widget", ".", "attrs", "[", "attr_name", "]", "=", "attr", "return", "field", "else", ":", "field", ".", "field", ".", "widget", ".", "attrs", "[", "attr_name", "]", "=", "attr_value", "return", "field" ]
Adds widget attributes to a bound form field. This is helpful if you would like to add a certain class to all your forms (i.e. `form-control` to all form fields when you are using Bootstrap):: {% load libs_tags %} {% for field in form.fields %} {% add_form_widget_attr field 'class' 'form-control' as field_ %} {{ field_ }} {% endfor %} The tag will check if the attr already exists and only append your value. If you would like to replace existing attrs, set `replace=1`:: {% add_form_widget_attr field 'class' 'form-control' replace=1 as field_ %}
[ "Adds", "widget", "attributes", "to", "a", "bound", "form", "field", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L32-L60
train
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
block_anyfilter
def block_anyfilter(parser, token): """ Turn any template filter into a blocktag. Usage:: {% load libs_tags %} {% block_anyfilter django.template.defaultfilters.truncatewords_html 15 %} // Something complex that generates html output {% endblockanyfilter %} """ bits = token.contents.split() nodelist = parser.parse(('endblockanyfilter',)) parser.delete_first_token() return BlockAnyFilterNode(nodelist, bits[1], *bits[2:])
python
def block_anyfilter(parser, token): """ Turn any template filter into a blocktag. Usage:: {% load libs_tags %} {% block_anyfilter django.template.defaultfilters.truncatewords_html 15 %} // Something complex that generates html output {% endblockanyfilter %} """ bits = token.contents.split() nodelist = parser.parse(('endblockanyfilter',)) parser.delete_first_token() return BlockAnyFilterNode(nodelist, bits[1], *bits[2:])
[ "def", "block_anyfilter", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "contents", ".", "split", "(", ")", "nodelist", "=", "parser", ".", "parse", "(", "(", "'endblockanyfilter'", ",", ")", ")", "parser", ".", "delete_first_token", "(", ")", "return", "BlockAnyFilterNode", "(", "nodelist", ",", "bits", "[", "1", "]", ",", "*", "bits", "[", "2", ":", "]", ")" ]
Turn any template filter into a blocktag. Usage:: {% load libs_tags %} {% block_anyfilter django.template.defaultfilters.truncatewords_html 15 %} // Something complex that generates html output {% endblockanyfilter %}
[ "Turn", "any", "template", "filter", "into", "a", "blocktag", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L64-L79
train
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
calculate_dimensions
def calculate_dimensions(image, long_side, short_side):
    """Returns the thumbnail dimensions depending on the image's format."""
    if image.width >= image.height:
        return '{0}x{1}'.format(long_side, short_side)
    return '{0}x{1}'.format(short_side, long_side)
python
def calculate_dimensions(image, long_side, short_side):
    """Returns the thumbnail dimensions depending on the image's format."""
    if image.width >= image.height:
        return '{0}x{1}'.format(long_side, short_side)
    return '{0}x{1}'.format(short_side, long_side)
[ "def", "calculate_dimensions", "(", "image", ",", "long_side", ",", "short_side", ")", ":", "if", "image", ".", "width", ">=", "image", ".", "height", ":", "return", "'{0}x{1}'", ".", "format", "(", "long_side", ",", "short_side", ")", "return", "'{0}x{1}'", ".", "format", "(", "short_side", ",", "long_side", ")" ]
Returns the thumbnail dimensions depending on the image's format.
[ "Returns", "the", "thumbnail", "dimensions", "depending", "on", "the", "images", "format", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L95-L99
train
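The orientation handling is easiest to see with concrete numbers; this is a minimal standalone sketch in which SimpleImage is a hypothetical stand-in exposing only the width/height attributes the helper reads.

from collections import namedtuple

# Hypothetical stand-in for an image object with width and height.
SimpleImage = namedtuple('SimpleImage', ['width', 'height'])

def calculate_dimensions(image, long_side, short_side):
    # Landscape (or square) images get the long side first, portrait images the short side.
    if image.width >= image.height:
        return '{0}x{1}'.format(long_side, short_side)
    return '{0}x{1}'.format(short_side, long_side)

print(calculate_dimensions(SimpleImage(800, 600), 400, 300))  # '400x300'
print(calculate_dimensions(SimpleImage(600, 800), 400, 300))  # '300x400'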
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
call
def call(obj, method, *args, **kwargs): """ Allows to call any method of any object with parameters. Because come on! It's bloody stupid that Django's templating engine doesn't allow that. Usage:: {% call myobj 'mymethod' myvar foobar=myvar2 as result %} {% call myobj 'mydict' 'mykey' as result %} {% call myobj 'myattribute' as result %} :param obj: The object which has the method that you would like to call :param method: A string representing the attribute on the object that should be called. """ function_or_dict_or_member = getattr(obj, method) if callable(function_or_dict_or_member): # If it is a function, let's call it return function_or_dict_or_member(*args, **kwargs) if not len(args): # If it is a member, lets return it return function_or_dict_or_member # If it is a dict, let's access one of it's keys return function_or_dict_or_member[args[0]]
python
def call(obj, method, *args, **kwargs): """ Allows to call any method of any object with parameters. Because come on! It's bloody stupid that Django's templating engine doesn't allow that. Usage:: {% call myobj 'mymethod' myvar foobar=myvar2 as result %} {% call myobj 'mydict' 'mykey' as result %} {% call myobj 'myattribute' as result %} :param obj: The object which has the method that you would like to call :param method: A string representing the attribute on the object that should be called. """ function_or_dict_or_member = getattr(obj, method) if callable(function_or_dict_or_member): # If it is a function, let's call it return function_or_dict_or_member(*args, **kwargs) if not len(args): # If it is a member, lets return it return function_or_dict_or_member # If it is a dict, let's access one of it's keys return function_or_dict_or_member[args[0]]
[ "def", "call", "(", "obj", ",", "method", ",", "*", "args", ",", "**", "kwargs", ")", ":", "function_or_dict_or_member", "=", "getattr", "(", "obj", ",", "method", ")", "if", "callable", "(", "function_or_dict_or_member", ")", ":", "return", "function_or_dict_or_member", "(", "*", "args", ",", "**", "kwargs", ")", "if", "not", "len", "(", "args", ")", ":", "return", "function_or_dict_or_member", "return", "function_or_dict_or_member", "[", "args", "[", "0", "]", "]" ]
Allows to call any method of any object with parameters. Because come on! It's bloody stupid that Django's templating engine doesn't allow that. Usage:: {% call myobj 'mymethod' myvar foobar=myvar2 as result %} {% call myobj 'mydict' 'mykey' as result %} {% call myobj 'myattribute' as result %} :param obj: The object which has the method that you would like to call :param method: A string representing the attribute on the object that should be called.
[ "Allows", "to", "call", "any", "method", "of", "any", "object", "with", "parameters", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L103-L129
train
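Because `call` uses nothing Django-specific, its dispatch logic can be exercised outside a template; the sketch below re-states the function and drives it with a made-up Invoice class that is not part of django-libs.

def call(obj, method, *args, **kwargs):
    target = getattr(obj, method)
    if callable(target):
        return target(*args, **kwargs)   # method call with arguments
    if not len(args):
        return target                    # plain attribute access
    return target[args[0]]               # dict-style key lookup

class Invoice(object):
    totals = {'net': 100, 'gross': 119}
    currency = 'EUR'

    def total(self, key='gross'):
        return self.totals[key]

invoice = Invoice()
print(call(invoice, 'total', 'net'))      # 100  (method call)
print(call(invoice, 'totals', 'gross'))   # 119  (dict lookup)
print(call(invoice, 'currency'))          # 'EUR' (attribute)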
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
concatenate
def concatenate(*args, **kwargs):
    """
    Concatenates the given strings.

    Usage::

        {% load libs_tags %}
        {% concatenate "foo" "bar" as new_string %}
        {% concatenate "foo" "bar" divider="_" as another_string %}

    The above would result in the strings "foobar" and "foo_bar".

    """
    divider = kwargs.get('divider', '')
    result = ''
    for arg in args:
        if result == '':
            result += arg
        else:
            result += '{0}{1}'.format(divider, arg)
    return result
python
def concatenate(*args, **kwargs):
    """
    Concatenates the given strings.

    Usage::

        {% load libs_tags %}
        {% concatenate "foo" "bar" as new_string %}
        {% concatenate "foo" "bar" divider="_" as another_string %}

    The above would result in the strings "foobar" and "foo_bar".

    """
    divider = kwargs.get('divider', '')
    result = ''
    for arg in args:
        if result == '':
            result += arg
        else:
            result += '{0}{1}'.format(divider, arg)
    return result
[ "def", "concatenate", "(", "*", "args", ",", "**", "kwargs", ")", ":", "divider", "=", "kwargs", ".", "get", "(", "'divider'", ",", "''", ")", "result", "=", "''", "for", "arg", "in", "args", ":", "if", "result", "==", "''", ":", "result", "+=", "arg", "else", ":", "result", "+=", "'{0}{1}'", ".", "format", "(", "divider", ",", "arg", ")", "return", "result" ]
Concatenates the given strings. Usage:: {% load libs_tags %} {% concatenate "foo" "bar" as new_string %} {% concatenate "foo" "bar" divider="_" as another_string %} The above would result in the strings "foobar" and "foo_bar".
[ "Concatenates", "the", "given", "strings", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L133-L153
train
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
get_content_type
def get_content_type(obj, field_name=False):
    """
    Returns the content type of an object.

    :param obj: A model instance.
    :param field_name: Field of the object to return.

    """
    content_type = ContentType.objects.get_for_model(obj)
    if field_name:
        return getattr(content_type, field_name, '')
    return content_type
python
def get_content_type(obj, field_name=False):
    """
    Returns the content type of an object.

    :param obj: A model instance.
    :param field_name: Field of the object to return.

    """
    content_type = ContentType.objects.get_for_model(obj)
    if field_name:
        return getattr(content_type, field_name, '')
    return content_type
[ "def", "get_content_type", "(", "obj", ",", "field_name", "=", "False", ")", ":", "content_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "obj", ")", "if", "field_name", ":", "return", "getattr", "(", "content_type", ",", "field_name", ",", "''", ")", "return", "content_type" ]
Returns the content type of an object. :param obj: A model instance. :param field_name: Field of the object to return.
[ "Returns", "the", "content", "type", "of", "an", "object", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L157-L168
train
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
get_verbose
def get_verbose(obj, field_name=""):
    """
    Returns the verbose name of an object's field.

    :param obj: A model instance.
    :param field_name: The requested field value in string format.

    """
    if hasattr(obj, "_meta") and hasattr(obj._meta, "get_field_by_name"):
        try:
            return obj._meta.get_field(field_name).verbose_name
        except FieldDoesNotExist:
            pass
    return ""
python
def get_verbose(obj, field_name=""):
    """
    Returns the verbose name of an object's field.

    :param obj: A model instance.
    :param field_name: The requested field value in string format.

    """
    if hasattr(obj, "_meta") and hasattr(obj._meta, "get_field_by_name"):
        try:
            return obj._meta.get_field(field_name).verbose_name
        except FieldDoesNotExist:
            pass
    return ""
[ "def", "get_verbose", "(", "obj", ",", "field_name", "=", "\"\"", ")", ":", "if", "hasattr", "(", "obj", ",", "\"_meta\"", ")", "and", "hasattr", "(", "obj", ".", "_meta", ",", "\"get_field_by_name\"", ")", ":", "try", ":", "return", "obj", ".", "_meta", ".", "get_field", "(", "field_name", ")", ".", "verbose_name", "except", "FieldDoesNotExist", ":", "pass", "return", "\"\"" ]
Returns the verbose name of an object's field. :param obj: A model instance. :param field_name: The requested field value in string format.
[ "Returns", "the", "verbose", "name", "of", "an", "object", "s", "field", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L200-L213
train
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
get_query_params
def get_query_params(request, *args): """ Allows to change one of the URL get parameter while keeping all the others. Usage:: {% load libs_tags %} {% get_query_params request "page" page_obj.next_page_number as query %} <a href="?{{ query }}">Next</a> You can also pass in several pairs of keys and values:: {% get_query_params request "page" 1 "foobar" 2 as query %} You often need this when you have a paginated set of objects with filters. Your url would look something like ``/?region=1&gender=m``. Your paginator needs to create links with ``&page=2`` in them but you must keep the filter values when switching pages. :param request: The request instance. :param *args: Make sure to always pass in paris of args. One is the key, one is the value. If you set the value of a key to "!remove" that parameter will not be included in the returned query. """ query = request.GET.copy() index = 1 key = '' for arg in args: if index % 2 != 0: key = arg else: if arg == "!remove": try: query.pop(key) except KeyError: pass else: query[key] = arg index += 1 return query.urlencode()
python
def get_query_params(request, *args): """ Allows to change one of the URL get parameter while keeping all the others. Usage:: {% load libs_tags %} {% get_query_params request "page" page_obj.next_page_number as query %} <a href="?{{ query }}">Next</a> You can also pass in several pairs of keys and values:: {% get_query_params request "page" 1 "foobar" 2 as query %} You often need this when you have a paginated set of objects with filters. Your url would look something like ``/?region=1&gender=m``. Your paginator needs to create links with ``&page=2`` in them but you must keep the filter values when switching pages. :param request: The request instance. :param *args: Make sure to always pass in paris of args. One is the key, one is the value. If you set the value of a key to "!remove" that parameter will not be included in the returned query. """ query = request.GET.copy() index = 1 key = '' for arg in args: if index % 2 != 0: key = arg else: if arg == "!remove": try: query.pop(key) except KeyError: pass else: query[key] = arg index += 1 return query.urlencode()
[ "def", "get_query_params", "(", "request", ",", "*", "args", ")", ":", "query", "=", "request", ".", "GET", ".", "copy", "(", ")", "index", "=", "1", "key", "=", "''", "for", "arg", "in", "args", ":", "if", "index", "%", "2", "!=", "0", ":", "key", "=", "arg", "else", ":", "if", "arg", "==", "\"!remove\"", ":", "try", ":", "query", ".", "pop", "(", "key", ")", "except", "KeyError", ":", "pass", "else", ":", "query", "[", "key", "]", "=", "arg", "index", "+=", "1", "return", "query", ".", "urlencode", "(", ")" ]
Allows to change one of the URL get parameter while keeping all the others. Usage:: {% load libs_tags %} {% get_query_params request "page" page_obj.next_page_number as query %} <a href="?{{ query }}">Next</a> You can also pass in several pairs of keys and values:: {% get_query_params request "page" 1 "foobar" 2 as query %} You often need this when you have a paginated set of objects with filters. Your url would look something like ``/?region=1&gender=m``. Your paginator needs to create links with ``&page=2`` in them but you must keep the filter values when switching pages. :param request: The request instance. :param *args: Make sure to always pass in paris of args. One is the key, one is the value. If you set the value of a key to "!remove" that parameter will not be included in the returned query.
[ "Allows", "to", "change", "one", "of", "the", "URL", "get", "parameter", "while", "keeping", "all", "the", "others", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L217-L258
train
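A rough standalone equivalent of the key/value-pair handling, assuming Python 3 and using urllib.parse instead of Django's QueryDict (repeated values are collapsed for brevity); this only illustrates the mechanics, it is not the django-libs implementation.

from urllib.parse import parse_qs, urlencode

def update_query(query_string, *args):
    # Keep only the last value per key, as a simplification of QueryDict.
    query = {k: v[-1] for k, v in parse_qs(query_string).items()}
    # args arrive in pairs: key, value, key, value, ...
    for key, value in zip(args[::2], args[1::2]):
        if value == '!remove':
            query.pop(key, None)
        else:
            query[key] = value
    return urlencode(query)

print(update_query('region=1&gender=m', 'page', 2))                   # region=1&gender=m&page=2
print(update_query('region=1&gender=m&page=3', 'page', '!remove'))    # region=1&gender=m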
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
navactive
def navactive(request, url, exact=0, use_resolver=1): """ Returns ``active`` if the given URL is in the url path, otherwise ''. Usage:: {% load libs_tags %} ... <li class="{% navactive request "/news/" exact=1 %}"> :param request: A request instance. :param url: A string representing a part of the URL that needs to exist in order for this method to return ``True``. :param exact: If ``1`` then the parameter ``url`` must be equal to ``request.path``, otherwise the parameter ``url`` can just be a part of ``request.path``. :use_resolver: If ``0`` we will not try to compare ``url`` with existing view names but we will only compare it with ``request.path``. """ if use_resolver: try: if url == resolve(request.path).url_name: # Checks the url pattern in case a view_name is posted return 'active' elif url == request.path: # Workaround to catch URLs with more than one part, which don't # raise a Resolver404 (e.g. '/index/info/') match = request.path else: return '' except Resolver404: # Indicates, that a simple url string is used (e.g. '/index/') match = request.path else: match = request.path if exact and url == match: return 'active' elif not exact and url in request.path: return 'active' return ''
python
def navactive(request, url, exact=0, use_resolver=1): """ Returns ``active`` if the given URL is in the url path, otherwise ''. Usage:: {% load libs_tags %} ... <li class="{% navactive request "/news/" exact=1 %}"> :param request: A request instance. :param url: A string representing a part of the URL that needs to exist in order for this method to return ``True``. :param exact: If ``1`` then the parameter ``url`` must be equal to ``request.path``, otherwise the parameter ``url`` can just be a part of ``request.path``. :use_resolver: If ``0`` we will not try to compare ``url`` with existing view names but we will only compare it with ``request.path``. """ if use_resolver: try: if url == resolve(request.path).url_name: # Checks the url pattern in case a view_name is posted return 'active' elif url == request.path: # Workaround to catch URLs with more than one part, which don't # raise a Resolver404 (e.g. '/index/info/') match = request.path else: return '' except Resolver404: # Indicates, that a simple url string is used (e.g. '/index/') match = request.path else: match = request.path if exact and url == match: return 'active' elif not exact and url in request.path: return 'active' return ''
[ "def", "navactive", "(", "request", ",", "url", ",", "exact", "=", "0", ",", "use_resolver", "=", "1", ")", ":", "if", "use_resolver", ":", "try", ":", "if", "url", "==", "resolve", "(", "request", ".", "path", ")", ".", "url_name", ":", "return", "'active'", "elif", "url", "==", "request", ".", "path", ":", "match", "=", "request", ".", "path", "else", ":", "return", "''", "except", "Resolver404", ":", "match", "=", "request", ".", "path", "else", ":", "match", "=", "request", ".", "path", "if", "exact", "and", "url", "==", "match", ":", "return", "'active'", "elif", "not", "exact", "and", "url", "in", "request", ".", "path", ":", "return", "'active'", "return", "''" ]
Returns ``active`` if the given URL is in the url path, otherwise ''. Usage:: {% load libs_tags %} ... <li class="{% navactive request "/news/" exact=1 %}"> :param request: A request instance. :param url: A string representing a part of the URL that needs to exist in order for this method to return ``True``. :param exact: If ``1`` then the parameter ``url`` must be equal to ``request.path``, otherwise the parameter ``url`` can just be a part of ``request.path``. :use_resolver: If ``0`` we will not try to compare ``url`` with existing view names but we will only compare it with ``request.path``.
[ "Returns", "active", "if", "the", "given", "URL", "is", "in", "the", "url", "path", "otherwise", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L290-L331
train
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
get_range_around
def get_range_around(range_value, current_item, padding): """ Returns a range of numbers around the given number. This is useful for pagination, where you might want to show something like this:: << < ... 4 5 (6) 7 8 .. > >> In this example `6` would be the current page and we show 2 items around that page (including the page itself). Usage:: {% load libs_tags %} {% get_range_around page_obj.paginator.num_pages page_obj.number 5 as pages %} :param range_amount: Number of total items in your range (1 indexed) :param current_item: The item around which the result should be centered (1 indexed) :param padding: Number of items to show left and right from the current item. """ total_items = 1 + padding * 2 left_bound = padding right_bound = range_value - padding if range_value <= total_items: range_items = range(1, range_value + 1) return { 'range_items': range_items, 'left_padding': False, 'right_padding': False, } if current_item <= left_bound: range_items = range(1, range_value + 1)[:total_items] return { 'range_items': range_items, 'left_padding': range_items[0] > 1, 'right_padding': range_items[-1] < range_value, } if current_item >= right_bound: range_items = range(1, range_value + 1)[-total_items:] return { 'range_items': range_items, 'left_padding': range_items[0] > 1, 'right_padding': range_items[-1] < range_value, } range_items = range(current_item - padding, current_item + padding + 1) return { 'range_items': range_items, 'left_padding': True, 'right_padding': True, }
python
def get_range_around(range_value, current_item, padding): """ Returns a range of numbers around the given number. This is useful for pagination, where you might want to show something like this:: << < ... 4 5 (6) 7 8 .. > >> In this example `6` would be the current page and we show 2 items around that page (including the page itself). Usage:: {% load libs_tags %} {% get_range_around page_obj.paginator.num_pages page_obj.number 5 as pages %} :param range_amount: Number of total items in your range (1 indexed) :param current_item: The item around which the result should be centered (1 indexed) :param padding: Number of items to show left and right from the current item. """ total_items = 1 + padding * 2 left_bound = padding right_bound = range_value - padding if range_value <= total_items: range_items = range(1, range_value + 1) return { 'range_items': range_items, 'left_padding': False, 'right_padding': False, } if current_item <= left_bound: range_items = range(1, range_value + 1)[:total_items] return { 'range_items': range_items, 'left_padding': range_items[0] > 1, 'right_padding': range_items[-1] < range_value, } if current_item >= right_bound: range_items = range(1, range_value + 1)[-total_items:] return { 'range_items': range_items, 'left_padding': range_items[0] > 1, 'right_padding': range_items[-1] < range_value, } range_items = range(current_item - padding, current_item + padding + 1) return { 'range_items': range_items, 'left_padding': True, 'right_padding': True, }
[ "def", "get_range_around", "(", "range_value", ",", "current_item", ",", "padding", ")", ":", "total_items", "=", "1", "+", "padding", "*", "2", "left_bound", "=", "padding", "right_bound", "=", "range_value", "-", "padding", "if", "range_value", "<=", "total_items", ":", "range_items", "=", "range", "(", "1", ",", "range_value", "+", "1", ")", "return", "{", "'range_items'", ":", "range_items", ",", "'left_padding'", ":", "False", ",", "'right_padding'", ":", "False", ",", "}", "if", "current_item", "<=", "left_bound", ":", "range_items", "=", "range", "(", "1", ",", "range_value", "+", "1", ")", "[", ":", "total_items", "]", "return", "{", "'range_items'", ":", "range_items", ",", "'left_padding'", ":", "range_items", "[", "0", "]", ">", "1", ",", "'right_padding'", ":", "range_items", "[", "-", "1", "]", "<", "range_value", ",", "}", "if", "current_item", ">=", "right_bound", ":", "range_items", "=", "range", "(", "1", ",", "range_value", "+", "1", ")", "[", "-", "total_items", ":", "]", "return", "{", "'range_items'", ":", "range_items", ",", "'left_padding'", ":", "range_items", "[", "0", "]", ">", "1", ",", "'right_padding'", ":", "range_items", "[", "-", "1", "]", "<", "range_value", ",", "}", "range_items", "=", "range", "(", "current_item", "-", "padding", ",", "current_item", "+", "padding", "+", "1", ")", "return", "{", "'range_items'", ":", "range_items", ",", "'left_padding'", ":", "True", ",", "'right_padding'", ":", "True", ",", "}" ]
Returns a range of numbers around the given number. This is useful for pagination, where you might want to show something like this:: << < ... 4 5 (6) 7 8 .. > >> In this example `6` would be the current page and we show 2 items around that page (including the page itself). Usage:: {% load libs_tags %} {% get_range_around page_obj.paginator.num_pages page_obj.number 5 as pages %} :param range_amount: Number of total items in your range (1 indexed) :param current_item: The item around which the result should be centered (1 indexed) :param padding: Number of items to show left and right from the current item.
[ "Returns", "a", "range", "of", "numbers", "around", "the", "given", "number", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L364-L420
train
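The branching is easier to follow with concrete numbers; the sketch below compacts the same windowing logic (dropping the left/right padding flags) and prints the page windows for a hypothetical 20-page paginator with padding 2.

def window(num_pages, current, padding):
    total = 1 + padding * 2
    if num_pages <= total:
        return list(range(1, num_pages + 1))
    if current <= padding:                    # pinned to the left edge
        return list(range(1, num_pages + 1))[:total]
    if current >= num_pages - padding:        # pinned to the right edge
        return list(range(1, num_pages + 1))[-total:]
    return list(range(current - padding, current + padding + 1))

print(window(20, 1, 2))    # [1, 2, 3, 4, 5]
print(window(20, 6, 2))    # [4, 5, 6, 7, 8]
print(window(20, 20, 2))   # [16, 17, 18, 19, 20]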
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
sum
def sum(context, key, value, multiplier=1):
    """
    Adds the given value to the total value currently held in ``key``.

    Use the multiplier if you want to turn a positive value into a negative
    and actually subtract from the current total sum.

    Usage::

        {% sum "MY_TOTAL" 42 -1 %}
        {{ MY_TOTAL }}

    """
    if key not in context.dicts[0]:
        context.dicts[0][key] = 0
    context.dicts[0][key] += value * multiplier
    return ''
python
def sum(context, key, value, multiplier=1):
    """
    Adds the given value to the total value currently held in ``key``.

    Use the multiplier if you want to turn a positive value into a negative
    and actually subtract from the current total sum.

    Usage::

        {% sum "MY_TOTAL" 42 -1 %}
        {{ MY_TOTAL }}

    """
    if key not in context.dicts[0]:
        context.dicts[0][key] = 0
    context.dicts[0][key] += value * multiplier
    return ''
[ "def", "sum", "(", "context", ",", "key", ",", "value", ",", "multiplier", "=", "1", ")", ":", "if", "key", "not", "in", "context", ".", "dicts", "[", "0", "]", ":", "context", ".", "dicts", "[", "0", "]", "[", "key", "]", "=", "0", "context", ".", "dicts", "[", "0", "]", "[", "key", "]", "+=", "value", "*", "multiplier", "return", "''" ]
Adds the given value to the total value currently held in ``key``. Use the multiplier if you want to turn a positive value into a negative and actually subtract from the current total sum. Usage:: {% sum "MY_TOTAL" 42 -1 %} {{ MY_TOTAL }}
[ "Adds", "the", "given", "value", "to", "the", "total", "value", "currently", "held", "in", "key", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L468-L484
train
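A minimal standalone sketch of the running-total behaviour; FakeContext is a made-up stand-in for Django's template context (a list of dicts), and the helper is renamed sum_tag here to avoid shadowing the builtin.

class FakeContext(object):
    def __init__(self):
        self.dicts = [{}]

def sum_tag(context, key, value, multiplier=1):
    if key not in context.dicts[0]:
        context.dicts[0][key] = 0
    context.dicts[0][key] += value * multiplier
    return ''

ctx = FakeContext()
sum_tag(ctx, 'MY_TOTAL', 42)        # add 42
sum_tag(ctx, 'MY_TOTAL', 10, -1)    # subtract 10 via the multiplier
print(ctx.dicts[0]['MY_TOTAL'])     # 32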
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
verbatim
def verbatim(parser, token):
    """Tag to render x-tmpl templates with Django template code."""
    text = []
    while 1:
        token = parser.tokens.pop(0)
        if token.contents == 'endverbatim':
            break
        if token.token_type == TOKEN_VAR:
            text.append('{{ ')
        elif token.token_type == TOKEN_BLOCK:
            text.append('{%')
        text.append(token.contents)
        if token.token_type == TOKEN_VAR:
            text.append(' }}')
        elif token.token_type == TOKEN_BLOCK:
            if not text[-1].startswith('='):
                text[-1:-1] = [' ']
            text.append(' %}')
    return VerbatimNode(''.join(text))
python
def verbatim(parser, token):
    """Tag to render x-tmpl templates with Django template code."""
    text = []
    while 1:
        token = parser.tokens.pop(0)
        if token.contents == 'endverbatim':
            break
        if token.token_type == TOKEN_VAR:
            text.append('{{ ')
        elif token.token_type == TOKEN_BLOCK:
            text.append('{%')
        text.append(token.contents)
        if token.token_type == TOKEN_VAR:
            text.append(' }}')
        elif token.token_type == TOKEN_BLOCK:
            if not text[-1].startswith('='):
                text[-1:-1] = [' ']
            text.append(' %}')
    return VerbatimNode(''.join(text))
[ "def", "verbatim", "(", "parser", ",", "token", ")", ":", "text", "=", "[", "]", "while", "1", ":", "token", "=", "parser", ".", "tokens", ".", "pop", "(", "0", ")", "if", "token", ".", "contents", "==", "'endverbatim'", ":", "break", "if", "token", ".", "token_type", "==", "TOKEN_VAR", ":", "text", ".", "append", "(", "'{{ '", ")", "elif", "token", ".", "token_type", "==", "TOKEN_BLOCK", ":", "text", ".", "append", "(", "'{%'", ")", "text", ".", "append", "(", "token", ".", "contents", ")", "if", "token", ".", "token_type", "==", "TOKEN_VAR", ":", "text", ".", "append", "(", "' }}'", ")", "elif", "token", ".", "token_type", "==", "TOKEN_BLOCK", ":", "if", "not", "text", "[", "-", "1", "]", ".", "startswith", "(", "'='", ")", ":", "text", "[", "-", "1", ":", "-", "1", "]", "=", "[", "' '", "]", "text", ".", "append", "(", "' %}'", ")", "return", "VerbatimNode", "(", "''", ".", "join", "(", "text", ")", ")" ]
Tag to render x-tmpl templates with Django template code.
[ "Tag", "to", "render", "x", "-", "tmpl", "templates", "with", "Django", "template", "code", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L502-L520
train
bitlabstudio/django-libs
django_libs/templatetags/libs_tags.py
append_s
def append_s(value):
    """
    Adds the possessive s after a string.

    value = 'Hans' becomes Hans'
    and
    value = 'Susi' becomes Susi's
    """
    if value.endswith('s'):
        return u"{0}'".format(value)
    else:
        return u"{0}'s".format(value)
python
def append_s(value):
    """
    Adds the possessive s after a string.

    value = 'Hans' becomes Hans'
    and
    value = 'Susi' becomes Susi's
    """
    if value.endswith('s'):
        return u"{0}'".format(value)
    else:
        return u"{0}'s".format(value)
[ "def", "append_s", "(", "value", ")", ":", "if", "value", ".", "endswith", "(", "'s'", ")", ":", "return", "u\"{0}'\"", ".", "format", "(", "value", ")", "else", ":", "return", "u\"{0}'s\"", ".", "format", "(", "value", ")" ]
Adds the possessive s after a string. value = 'Hans' becomes Hans' and value = 'Susi' becomes Susi's
[ "Adds", "the", "possessive", "s", "after", "a", "string", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/templatetags/libs_tags.py#L566-L577
train
Genida/django-meerkat
src/meerkat/logs/urls.py
logs_urlpatterns
def logs_urlpatterns(admin_view=lambda x: x): """ Return the URL patterns for the logs views. Args: admin_view (callable): admin_view method from an AdminSite instance. Returns: list: the URL patterns for the logs views. """ return [ url(r'^$', admin_view(LogsMenu.as_view()), name='logs'), url(r'^status_codes$', admin_view(LogsStatusCodes.as_view()), name='logs_status_codes'), url(r'^status_codes_by_date$', admin_view(LogsStatusCodesByDate.as_view()), name='logs_status_codes_by_date'), url(r'^most_visited_pages$', admin_view(LogsMostVisitedPages.as_view()), name='logs_most_visited_pages') ]
python
def logs_urlpatterns(admin_view=lambda x: x): """ Return the URL patterns for the logs views. Args: admin_view (callable): admin_view method from an AdminSite instance. Returns: list: the URL patterns for the logs views. """ return [ url(r'^$', admin_view(LogsMenu.as_view()), name='logs'), url(r'^status_codes$', admin_view(LogsStatusCodes.as_view()), name='logs_status_codes'), url(r'^status_codes_by_date$', admin_view(LogsStatusCodesByDate.as_view()), name='logs_status_codes_by_date'), url(r'^most_visited_pages$', admin_view(LogsMostVisitedPages.as_view()), name='logs_most_visited_pages') ]
[ "def", "logs_urlpatterns", "(", "admin_view", "=", "lambda", "x", ":", "x", ")", ":", "return", "[", "url", "(", "r'^$'", ",", "admin_view", "(", "LogsMenu", ".", "as_view", "(", ")", ")", ",", "name", "=", "'logs'", ")", ",", "url", "(", "r'^status_codes$'", ",", "admin_view", "(", "LogsStatusCodes", ".", "as_view", "(", ")", ")", ",", "name", "=", "'logs_status_codes'", ")", ",", "url", "(", "r'^status_codes_by_date$'", ",", "admin_view", "(", "LogsStatusCodesByDate", ".", "as_view", "(", ")", ")", ",", "name", "=", "'logs_status_codes_by_date'", ")", ",", "url", "(", "r'^most_visited_pages$'", ",", "admin_view", "(", "LogsMostVisitedPages", ".", "as_view", "(", ")", ")", ",", "name", "=", "'logs_most_visited_pages'", ")", "]" ]
Return the URL patterns for the logs views. Args: admin_view (callable): admin_view method from an AdminSite instance. Returns: list: the URL patterns for the logs views.
[ "Return", "the", "URL", "patterns", "for", "the", "logs", "views", "." ]
486502a75bb0800266db785fd32717d8c0eb8deb
https://github.com/Genida/django-meerkat/blob/486502a75bb0800266db785fd32717d8c0eb8deb/src/meerkat/logs/urls.py#L11-L34
train
Genida/django-meerkat
src/meerkat/utils/ip_info.py
IpInfoHandler._get
def _get(self, ip):
    """
    Get information about an IP.

    Args:
        ip (str): an IP (xxx.xxx.xxx.xxx).

    Returns:
        dict: see http://ipinfo.io/developers/getting-started
    """
    # Geoloc updated up to once a week:
    # http://ipinfo.io/developers/data#geolocation-data
    retries = 10
    for retry in range(retries):
        try:
            response = requests.get('http://ipinfo.io/%s/json' % ip,
                                    verify=False, timeout=1)  # nosec
            if response.status_code == 429:
                raise RateExceededError
            return response.json()
        except (requests.ReadTimeout, requests.ConnectTimeout):
            pass
    return {}
python
def _get(self, ip):
    """
    Get information about an IP.

    Args:
        ip (str): an IP (xxx.xxx.xxx.xxx).

    Returns:
        dict: see http://ipinfo.io/developers/getting-started
    """
    # Geoloc updated up to once a week:
    # http://ipinfo.io/developers/data#geolocation-data
    retries = 10
    for retry in range(retries):
        try:
            response = requests.get('http://ipinfo.io/%s/json' % ip,
                                    verify=False, timeout=1)  # nosec
            if response.status_code == 429:
                raise RateExceededError
            return response.json()
        except (requests.ReadTimeout, requests.ConnectTimeout):
            pass
    return {}
[ "def", "_get", "(", "self", ",", "ip", ")", ":", "retries", "=", "10", "for", "retry", "in", "range", "(", "retries", ")", ":", "try", ":", "response", "=", "requests", ".", "get", "(", "'http://ipinfo.io/%s/json'", "%", "ip", ",", "verify", "=", "False", ",", "timeout", "=", "1", ")", "if", "response", ".", "status_code", "==", "429", ":", "raise", "RateExceededError", "return", "response", ".", "json", "(", ")", "except", "(", "requests", ".", "ReadTimeout", ",", "requests", ".", "ConnectTimeout", ")", ":", "pass", "return", "{", "}" ]
Get information about an IP. Args: ip (str): an IP (xxx.xxx.xxx.xxx). Returns: dict: see http://ipinfo.io/developers/getting-started
[ "Get", "information", "about", "an", "IP", "." ]
486502a75bb0800266db785fd32717d8c0eb8deb
https://github.com/Genida/django-meerkat/blob/486502a75bb0800266db785fd32717d8c0eb8deb/src/meerkat/utils/ip_info.py#L100-L122
train
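The retry pattern can be reproduced without the Meerkat handler class; this sketch assumes only the requests package, keeps the same 429 handling, and surfaces the rate limit as a plain RuntimeError instead of the library's RateExceededError. The live call is left commented out because it needs network access.

import requests

def ip_info(ip, retries=10):
    # Query ipinfo.io, retrying on connect/read timeouts and failing fast
    # when the anonymous rate limit (HTTP 429) is hit.
    for _ in range(retries):
        try:
            response = requests.get('https://ipinfo.io/%s/json' % ip, timeout=1)
            if response.status_code == 429:
                raise RuntimeError('ipinfo.io rate limit exceeded')
            return response.json()
        except (requests.ReadTimeout, requests.ConnectTimeout):
            pass
    return {}

# print(ip_info('8.8.8.8').get('country'))  # e.g. 'US' when the service is reachable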
flavio/scsgate
scsgate/messages.py
parse
def parse(data):
    """ Parses a raw datagram and returns the right type of message """

    # convert to string
    data = data.decode("ascii")

    if len(data) == 2 and data == "A5":
        return AckMessage()

    # split into bytes
    raw = [data[i:i+2] for i in range(len(data)) if i % 2 == 0]

    if len(raw) != 7:
        return UnknownMessage(raw)

    if raw[1] == "B8":
        return StateMessage(raw)
    elif raw[3] == "12":
        return CommandMessage(raw)
    elif raw[3] == "14":
        return ScenarioTriggeredMessage(raw)
    elif raw[3] == "15":
        return RequestStatusMessage(raw)
    else:
        return UnknownMessage(raw)
python
def parse(data):
    """ Parses a raw datagram and returns the right type of message """

    # convert to string
    data = data.decode("ascii")

    if len(data) == 2 and data == "A5":
        return AckMessage()

    # split into bytes
    raw = [data[i:i+2] for i in range(len(data)) if i % 2 == 0]

    if len(raw) != 7:
        return UnknownMessage(raw)

    if raw[1] == "B8":
        return StateMessage(raw)
    elif raw[3] == "12":
        return CommandMessage(raw)
    elif raw[3] == "14":
        return ScenarioTriggeredMessage(raw)
    elif raw[3] == "15":
        return RequestStatusMessage(raw)
    else:
        return UnknownMessage(raw)
[ "def", "parse", "(", "data", ")", ":", "data", "=", "data", ".", "decode", "(", "\"ascii\"", ")", "if", "len", "(", "data", ")", "==", "2", "and", "data", "==", "\"A5\"", ":", "return", "AckMessage", "(", ")", "raw", "=", "[", "data", "[", "i", ":", "i", "+", "2", "]", "for", "i", "in", "range", "(", "len", "(", "data", ")", ")", "if", "i", "%", "2", "==", "0", "]", "if", "len", "(", "raw", ")", "!=", "7", ":", "return", "UnknownMessage", "(", "raw", ")", "if", "raw", "[", "1", "]", "==", "\"B8\"", ":", "return", "StateMessage", "(", "raw", ")", "elif", "raw", "[", "3", "]", "==", "\"12\"", ":", "return", "CommandMessage", "(", "raw", ")", "elif", "raw", "[", "3", "]", "==", "\"14\"", ":", "return", "ScenarioTriggeredMessage", "(", "raw", ")", "elif", "raw", "[", "3", "]", "==", "\"15\"", ":", "return", "RequestStatusMessage", "(", "raw", ")", "else", ":", "return", "UnknownMessage", "(", "raw", ")" ]
Parses a raw datagram and returns the right type of message
[ "Parses", "a", "raw", "datagram", "and", "return", "the", "right", "type", "of", "message" ]
aad1d181eef4714ab475f4ff7fcfac4a6425fbb4
https://github.com/flavio/scsgate/blob/aad1d181eef4714ab475f4ff7fcfac4a6425fbb4/scsgate/messages.py#L213-L237
train
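A standalone sketch of the byte-pair splitting and dispatch rules used by parse(); it returns labels instead of the scsgate message classes, skips checksum validation, and the sample datagrams are made up purely to show the mechanics.

def classify(data):
    if len(data) == 2 and data == "A5":
        return "ack"
    # split the hex string into byte pairs, as parse() does
    raw = [data[i:i + 2] for i in range(len(data)) if i % 2 == 0]
    if len(raw) != 7:
        return "unknown"
    if raw[1] == "B8":
        return "state"
    if raw[3] == "12":
        return "command"
    if raw[3] == "14":
        return "scenario triggered"
    if raw[3] == "15":
        return "status request"
    return "unknown"

print(classify("A5"))               # 'ack'
print(classify("A8310012012AA3"))   # 'command'  (raw[3] == '12')
print(classify("A8B83107000EA3"))   # 'state'    (raw[1] == 'B8')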
flavio/scsgate
scsgate/messages.py
checksum_bytes
def checksum_bytes(data):
    """
    Returns a XOR of all the bytes specified inside of the given list
    """
    int_values = [int(x, 16) for x in data]
    int_xor = reduce(lambda x, y: x ^ y, int_values)
    hex_xor = "{:X}".format(int_xor)
    if len(hex_xor) % 2 != 0:
        hex_xor = "0" + hex_xor
    return str.encode(hex_xor)
python
def checksum_bytes(data):
    """
    Returns a XOR of all the bytes specified inside of the given list
    """
    int_values = [int(x, 16) for x in data]
    int_xor = reduce(lambda x, y: x ^ y, int_values)
    hex_xor = "{:X}".format(int_xor)
    if len(hex_xor) % 2 != 0:
        hex_xor = "0" + hex_xor
    return str.encode(hex_xor)
[ "def", "checksum_bytes", "(", "data", ")", ":", "int_values", "=", "[", "int", "(", "x", ",", "16", ")", "for", "x", "in", "data", "]", "int_xor", "=", "reduce", "(", "lambda", "x", ",", "y", ":", "x", "^", "y", ",", "int_values", ")", "hex_xor", "=", "\"{:X}\"", ".", "format", "(", "int_xor", ")", "if", "len", "(", "hex_xor", ")", "%", "2", "!=", "0", ":", "hex_xor", "=", "\"0\"", "+", "hex_xor", "return", "str", ".", "encode", "(", "hex_xor", ")" ]
Returns a XOR of all the bytes specified inside of the given list
[ "Returns", "a", "XOR", "of", "all", "the", "bytes", "specified", "inside", "of", "the", "given", "list" ]
aad1d181eef4714ab475f4ff7fcfac4a6425fbb4
https://github.com/flavio/scsgate/blob/aad1d181eef4714ab475f4ff7fcfac4a6425fbb4/scsgate/messages.py#L240-L249
train
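A worked example of the XOR checksum, self-contained apart from the made-up body values: 0x31 ^ 0x00 ^ 0x12 ^ 0x01 = 0x22, and a single-digit result gets zero-padded to two hex characters.

from functools import reduce

def checksum_bytes(data):
    int_values = [int(x, 16) for x in data]
    int_xor = reduce(lambda x, y: x ^ y, int_values)
    hex_xor = "{:X}".format(int_xor)
    if len(hex_xor) % 2 != 0:
        hex_xor = "0" + hex_xor   # pad odd-length results to a full byte
    return str.encode(hex_xor)

print(checksum_bytes([b"31", b"00", b"12", b"01"]))  # b'22'
print(checksum_bytes([b"0A", b"0C"]))                # b'06' (padded)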
flavio/scsgate
scsgate/messages.py
compose_telegram
def compose_telegram(body):
    """
    Compose a SCS message

    body: list containing the body of the message.
    returns: full telegram expressed (bytes instance)
    """
    msg = [b"A8"] + body + [checksum_bytes(body)] + [b"A3"]
    return str.encode("".join([x.decode() for x in msg]))
python
def compose_telegram(body):
    """
    Compose a SCS message

    body: list containing the body of the message.
    returns: full telegram expressed (bytes instance)
    """
    msg = [b"A8"] + body + [checksum_bytes(body)] + [b"A3"]
    return str.encode("".join([x.decode() for x in msg]))
[ "def", "compose_telegram", "(", "body", ")", ":", "msg", "=", "[", "b\"A8\"", "]", "+", "body", "+", "[", "checksum_bytes", "(", "body", ")", "]", "+", "[", "b\"A3\"", "]", "return", "str", ".", "encode", "(", "\"\"", ".", "join", "(", "[", "x", ".", "decode", "(", ")", "for", "x", "in", "msg", "]", ")", ")" ]
Compose a SCS message body: list containing the body of the message. returns: full telegram expressed (bytes instance)
[ "Compose", "a", "SCS", "message" ]
aad1d181eef4714ab475f4ff7fcfac4a6425fbb4
https://github.com/flavio/scsgate/blob/aad1d181eef4714ab475f4ff7fcfac4a6425fbb4/scsgate/messages.py#L252-L260
train
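An end-to-end sketch of framing a made-up four-byte body with the A8/A3 markers and the XOR checksum shown above; checksum_bytes is restated so the snippet runs on its own.

from functools import reduce

def checksum_bytes(data):
    int_values = [int(x, 16) for x in data]
    int_xor = reduce(lambda x, y: x ^ y, int_values)
    hex_xor = "{:X}".format(int_xor)
    if len(hex_xor) % 2 != 0:
        hex_xor = "0" + hex_xor
    return str.encode(hex_xor)

def compose_telegram(body):
    # start marker + body + checksum + end marker, joined into one bytes string
    msg = [b"A8"] + body + [checksum_bytes(body)] + [b"A3"]
    return str.encode("".join([x.decode() for x in msg]))

print(compose_telegram([b"31", b"00", b"12", b"01"]))  # b'A83100120122A3'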
bitlabstudio/django-libs
django_libs/utils/email.py
send_email
def send_email(request, context, subject_template, body_template, from_email, recipients, priority="medium", reply_to=None, headers=None, cc=None, bcc=None): """ Sends an email based on templates for subject and body. :param request: The current request instance. :param context: A dictionary of items that should be added to the templates' contexts. :param subject_template: A string representing the path to the template of of the email's subject. :param body_template: A string representing the path to the template of the email's body. :param from_email: String that represents the sender of the email. :param recipients: A list of tuples of recipients. The tuples are similar to the ADMINS setting. :param priority: Sets the priority of the email (only used by django-mailer to prioritise email sendings). :param reply_to: Optional email address to reply to. :param headers: Additional dictionary to add header attributes. :param cc: A list of CC recipients :param bcc: A list of BCC recipients """ headers = headers or {} if not reply_to: reply_to = from_email # Add additional context if hasattr(settings, 'DJANGO_LIBS_EMAIL_CONTEXT'): context_fn = load_member_from_setting('DJANGO_LIBS_EMAIL_CONTEXT') context.update(context_fn(request)) if request and request.get_host(): domain = request.get_host() protocol = 'https://' if request.is_secure() else 'http://' else: domain = getattr(settings, 'DOMAIN', Site.objects.get_current().domain) protocol = getattr(settings, 'PROTOCOL', 'http://') context.update({ 'domain': domain, 'protocol': protocol, }) subject = render_to_string(template_name=subject_template, context=context, request=request) subject = ''.join(subject.splitlines()) message_html = render_to_string(template_name=body_template, context=context, request=request) message_plaintext = html_to_plain_text(message_html) subject = force_text(subject) message = force_text(message_plaintext) email = EmailMultiAlternatives( subject=subject, body=message, from_email=from_email, to=recipients, cc=cc, bcc=bcc, headers=headers, reply_to=[reply_to], ) email.attach_alternative(message_html, "text/html") if settings.EMAIL_BACKEND == 'mailer.backend.DbBackend': # We customize `mailer.send_html_mail` to enable CC and BCC priority = mailer.get_priority(priority) msg = make_message( subject=subject, body=message, from_email=from_email, to=recipients, priority=priority, ) msg.email = email msg.save() else: email.send()
python
def send_email(request, context, subject_template, body_template, from_email, recipients, priority="medium", reply_to=None, headers=None, cc=None, bcc=None): """ Sends an email based on templates for subject and body. :param request: The current request instance. :param context: A dictionary of items that should be added to the templates' contexts. :param subject_template: A string representing the path to the template of of the email's subject. :param body_template: A string representing the path to the template of the email's body. :param from_email: String that represents the sender of the email. :param recipients: A list of tuples of recipients. The tuples are similar to the ADMINS setting. :param priority: Sets the priority of the email (only used by django-mailer to prioritise email sendings). :param reply_to: Optional email address to reply to. :param headers: Additional dictionary to add header attributes. :param cc: A list of CC recipients :param bcc: A list of BCC recipients """ headers = headers or {} if not reply_to: reply_to = from_email # Add additional context if hasattr(settings, 'DJANGO_LIBS_EMAIL_CONTEXT'): context_fn = load_member_from_setting('DJANGO_LIBS_EMAIL_CONTEXT') context.update(context_fn(request)) if request and request.get_host(): domain = request.get_host() protocol = 'https://' if request.is_secure() else 'http://' else: domain = getattr(settings, 'DOMAIN', Site.objects.get_current().domain) protocol = getattr(settings, 'PROTOCOL', 'http://') context.update({ 'domain': domain, 'protocol': protocol, }) subject = render_to_string(template_name=subject_template, context=context, request=request) subject = ''.join(subject.splitlines()) message_html = render_to_string(template_name=body_template, context=context, request=request) message_plaintext = html_to_plain_text(message_html) subject = force_text(subject) message = force_text(message_plaintext) email = EmailMultiAlternatives( subject=subject, body=message, from_email=from_email, to=recipients, cc=cc, bcc=bcc, headers=headers, reply_to=[reply_to], ) email.attach_alternative(message_html, "text/html") if settings.EMAIL_BACKEND == 'mailer.backend.DbBackend': # We customize `mailer.send_html_mail` to enable CC and BCC priority = mailer.get_priority(priority) msg = make_message( subject=subject, body=message, from_email=from_email, to=recipients, priority=priority, ) msg.email = email msg.save() else: email.send()
[ "def", "send_email", "(", "request", ",", "context", ",", "subject_template", ",", "body_template", ",", "from_email", ",", "recipients", ",", "priority", "=", "\"medium\"", ",", "reply_to", "=", "None", ",", "headers", "=", "None", ",", "cc", "=", "None", ",", "bcc", "=", "None", ")", ":", "headers", "=", "headers", "or", "{", "}", "if", "not", "reply_to", ":", "reply_to", "=", "from_email", "if", "hasattr", "(", "settings", ",", "'DJANGO_LIBS_EMAIL_CONTEXT'", ")", ":", "context_fn", "=", "load_member_from_setting", "(", "'DJANGO_LIBS_EMAIL_CONTEXT'", ")", "context", ".", "update", "(", "context_fn", "(", "request", ")", ")", "if", "request", "and", "request", ".", "get_host", "(", ")", ":", "domain", "=", "request", ".", "get_host", "(", ")", "protocol", "=", "'https://'", "if", "request", ".", "is_secure", "(", ")", "else", "'http://'", "else", ":", "domain", "=", "getattr", "(", "settings", ",", "'DOMAIN'", ",", "Site", ".", "objects", ".", "get_current", "(", ")", ".", "domain", ")", "protocol", "=", "getattr", "(", "settings", ",", "'PROTOCOL'", ",", "'http://'", ")", "context", ".", "update", "(", "{", "'domain'", ":", "domain", ",", "'protocol'", ":", "protocol", ",", "}", ")", "subject", "=", "render_to_string", "(", "template_name", "=", "subject_template", ",", "context", "=", "context", ",", "request", "=", "request", ")", "subject", "=", "''", ".", "join", "(", "subject", ".", "splitlines", "(", ")", ")", "message_html", "=", "render_to_string", "(", "template_name", "=", "body_template", ",", "context", "=", "context", ",", "request", "=", "request", ")", "message_plaintext", "=", "html_to_plain_text", "(", "message_html", ")", "subject", "=", "force_text", "(", "subject", ")", "message", "=", "force_text", "(", "message_plaintext", ")", "email", "=", "EmailMultiAlternatives", "(", "subject", "=", "subject", ",", "body", "=", "message", ",", "from_email", "=", "from_email", ",", "to", "=", "recipients", ",", "cc", "=", "cc", ",", "bcc", "=", "bcc", ",", "headers", "=", "headers", ",", "reply_to", "=", "[", "reply_to", "]", ",", ")", "email", ".", "attach_alternative", "(", "message_html", ",", "\"text/html\"", ")", "if", "settings", ".", "EMAIL_BACKEND", "==", "'mailer.backend.DbBackend'", ":", "priority", "=", "mailer", ".", "get_priority", "(", "priority", ")", "msg", "=", "make_message", "(", "subject", "=", "subject", ",", "body", "=", "message", ",", "from_email", "=", "from_email", ",", "to", "=", "recipients", ",", "priority", "=", "priority", ",", ")", "msg", ".", "email", "=", "email", "msg", ".", "save", "(", ")", "else", ":", "email", ".", "send", "(", ")" ]
Sends an email based on templates for subject and body. :param request: The current request instance. :param context: A dictionary of items that should be added to the templates' contexts. :param subject_template: A string representing the path to the template of of the email's subject. :param body_template: A string representing the path to the template of the email's body. :param from_email: String that represents the sender of the email. :param recipients: A list of tuples of recipients. The tuples are similar to the ADMINS setting. :param priority: Sets the priority of the email (only used by django-mailer to prioritise email sendings). :param reply_to: Optional email address to reply to. :param headers: Additional dictionary to add header attributes. :param cc: A list of CC recipients :param bcc: A list of BCC recipients
[ "Sends", "an", "email", "based", "on", "templates", "for", "subject", "and", "body", "." ]
2c5376cda084bf16edea540e0f6999f1d844afd0
https://github.com/bitlabstudio/django-libs/blob/2c5376cda084bf16edea540e0f6999f1d844afd0/django_libs/utils/email.py#L18-L92
train
Genida/django-meerkat
src/meerkat/utils/url.py
url_is_project
def url_is_project(url, default='not_a_func'): """ Check if URL is part of the current project's URLs. Args: url (str): URL to check. default (callable): used to filter out some URLs attached to function. Returns: """ try: u = resolve(url) if u and u.func != default: return True except Resolver404: static_url = settings.STATIC_URL static_url_wd = static_url.lstrip('/') if url.startswith(static_url): url = url[len(static_url):] elif url.startswith(static_url_wd): url = url[len(static_url_wd):] else: return False if finders.find(url): return True return False
python
def url_is_project(url, default='not_a_func'): """ Check if URL is part of the current project's URLs. Args: url (str): URL to check. default (callable): used to filter out some URLs attached to function. Returns: """ try: u = resolve(url) if u and u.func != default: return True except Resolver404: static_url = settings.STATIC_URL static_url_wd = static_url.lstrip('/') if url.startswith(static_url): url = url[len(static_url):] elif url.startswith(static_url_wd): url = url[len(static_url_wd):] else: return False if finders.find(url): return True return False
[ "def", "url_is_project", "(", "url", ",", "default", "=", "'not_a_func'", ")", ":", "try", ":", "u", "=", "resolve", "(", "url", ")", "if", "u", "and", "u", ".", "func", "!=", "default", ":", "return", "True", "except", "Resolver404", ":", "static_url", "=", "settings", ".", "STATIC_URL", "static_url_wd", "=", "static_url", ".", "lstrip", "(", "'/'", ")", "if", "url", ".", "startswith", "(", "static_url", ")", ":", "url", "=", "url", "[", "len", "(", "static_url", ")", ":", "]", "elif", "url", ".", "startswith", "(", "static_url_wd", ")", ":", "url", "=", "url", "[", "len", "(", "static_url_wd", ")", ":", "]", "else", ":", "return", "False", "if", "finders", ".", "find", "(", "url", ")", ":", "return", "True", "return", "False" ]
Check if URL is part of the current project's URLs. Args: url (str): URL to check. default (callable): used to filter out some URLs attached to function. Returns:
[ "Check", "if", "URL", "is", "part", "of", "the", "current", "project", "s", "URLs", "." ]
486502a75bb0800266db785fd32717d8c0eb8deb
https://github.com/Genida/django-meerkat/blob/486502a75bb0800266db785fd32717d8c0eb8deb/src/meerkat/utils/url.py#L14-L40
train
Genida/django-meerkat
src/meerkat/utils/url.py
url_is
def url_is(white_list):
    """
    Function generator.

    Args:
        white_list (dict): dict with PREFIXES and CONSTANTS keys (list values).

    Returns:
        func: a function to check if a URL is...
    """
    def func(url):
        prefixes = white_list.get('PREFIXES', ())
        for prefix in prefixes:
            if url.startswith(prefix):
                return True
        constants = white_list.get('CONSTANTS', ())
        for exact_url in constants:
            if url == exact_url:
                return True
        return False
    return func
python
def url_is(white_list):
    """
    Function generator.

    Args:
        white_list (dict): dict with PREFIXES and CONSTANTS keys (list values).

    Returns:
        func: a function to check if a URL is...
    """
    def func(url):
        prefixes = white_list.get('PREFIXES', ())
        for prefix in prefixes:
            if url.startswith(prefix):
                return True
        constants = white_list.get('CONSTANTS', ())
        for exact_url in constants:
            if url == exact_url:
                return True
        return False
    return func
[ "def", "url_is", "(", "white_list", ")", ":", "def", "func", "(", "url", ")", ":", "prefixes", "=", "white_list", ".", "get", "(", "'PREFIXES'", ",", "(", ")", ")", "for", "prefix", "in", "prefixes", ":", "if", "url", ".", "startswith", "(", "prefix", ")", ":", "return", "True", "constants", "=", "white_list", ".", "get", "(", "'CONSTANTS'", ",", "(", ")", ")", "for", "exact_url", "in", "constants", ":", "if", "url", "==", "exact_url", ":", "return", "True", "return", "False", "return", "func" ]
Function generator. Args: white_list (dict): dict with PREFIXES and CONSTANTS keys (list values). Returns: func: a function to check if a URL is...
[ "Function", "generator", "." ]
486502a75bb0800266db785fd32717d8c0eb8deb
https://github.com/Genida/django-meerkat/blob/486502a75bb0800266db785fd32717d8c0eb8deb/src/meerkat/utils/url.py#L43-L63
train
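A standalone sketch of the generated checker; the whitelist below is made up and the closure is restated in a slightly compacted form.

IGNORED_URLS = {
    'PREFIXES': ['/static/', '/media/'],
    'CONSTANTS': ['/favicon.ico', '/robots.txt'],
}

def url_is(white_list):
    def func(url):
        for prefix in white_list.get('PREFIXES', ()):
            if url.startswith(prefix):
                return True
        for exact_url in white_list.get('CONSTANTS', ()):
            if url == exact_url:
                return True
        return False
    return func

url_is_ignored = url_is(IGNORED_URLS)
print(url_is_ignored('/static/css/base.css'))  # True (prefix match)
print(url_is_ignored('/favicon.ico'))          # True (exact match)
print(url_is_ignored('/admin/'))               # False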
gsi-upm/soil
soil/history.py
History.save_records
def save_records(self, records):
    '''
    Save a collection of records
    '''
    for record in records:
        if not isinstance(record, Record):
            record = Record(*record)
        self.save_record(*record)
python
def save_records(self, records):
    '''
    Save a collection of records
    '''
    for record in records:
        if not isinstance(record, Record):
            record = Record(*record)
        self.save_record(*record)
[ "def", "save_records", "(", "self", ",", "records", ")", ":", "for", "record", "in", "records", ":", "if", "not", "isinstance", "(", "record", ",", "Record", ")", ":", "record", "=", "Record", "(", "*", "record", ")", "self", ".", "save_record", "(", "*", "record", ")" ]
Save a collection of records
[ "Save", "a", "collection", "of", "records" ]
a3ea434f237f039c3cadbc2e0a83ae626d77b818
https://github.com/gsi-upm/soil/blob/a3ea434f237f039c3cadbc2e0a83ae626d77b818/soil/history.py#L70-L77
train
gsi-upm/soil
soil/history.py
History.save_record
def save_record(self, agent_id, t_step, key, value):
    '''
    Save a collection of records to the database.
    Database writes are cached.
    '''
    value = self.convert(key, value)
    self._tups.append(Record(agent_id=agent_id,
                             t_step=t_step,
                             key=key,
                             value=value))
    if len(self._tups) > 100:
        self.flush_cache()
python
def save_record(self, agent_id, t_step, key, value):
    '''
    Save a collection of records to the database.
    Database writes are cached.
    '''
    value = self.convert(key, value)
    self._tups.append(Record(agent_id=agent_id,
                             t_step=t_step,
                             key=key,
                             value=value))
    if len(self._tups) > 100:
        self.flush_cache()
[ "def", "save_record", "(", "self", ",", "agent_id", ",", "t_step", ",", "key", ",", "value", ")", ":", "value", "=", "self", ".", "convert", "(", "key", ",", "value", ")", "self", ".", "_tups", ".", "append", "(", "Record", "(", "agent_id", "=", "agent_id", ",", "t_step", "=", "t_step", ",", "key", "=", "key", ",", "value", "=", "value", ")", ")", "if", "len", "(", "self", ".", "_tups", ")", ">", "100", ":", "self", ".", "flush_cache", "(", ")" ]
Save a collection of records to the database. Database writes are cached.
[ "Save", "a", "collection", "of", "records", "to", "the", "database", ".", "Database", "writes", "are", "cached", "." ]
a3ea434f237f039c3cadbc2e0a83ae626d77b818
https://github.com/gsi-upm/soil/blob/a3ea434f237f039c3cadbc2e0a83ae626d77b818/soil/history.py#L79-L90
train
gsi-upm/soil
soil/history.py
History.convert
def convert(self, key, value):
    """Get the serialized value for a given key."""
    if key not in self._dtypes:
        self.read_types()
    if key not in self._dtypes:
        name = utils.name(value)
        serializer = utils.serializer(name)
        deserializer = utils.deserializer(name)
        self._dtypes[key] = (name, serializer, deserializer)
        with self.db:
            self.db.execute("replace into value_types (key, value_type) values (?, ?)", (key, name))
    return self._dtypes[key][1](value)
python
def convert(self, key, value):
    """Get the serialized value for a given key."""
    if key not in self._dtypes:
        self.read_types()
    if key not in self._dtypes:
        name = utils.name(value)
        serializer = utils.serializer(name)
        deserializer = utils.deserializer(name)
        self._dtypes[key] = (name, serializer, deserializer)
        with self.db:
            self.db.execute("replace into value_types (key, value_type) values (?, ?)", (key, name))
    return self._dtypes[key][1](value)
[ "def", "convert", "(", "self", ",", "key", ",", "value", ")", ":", "if", "key", "not", "in", "self", ".", "_dtypes", ":", "self", ".", "read_types", "(", ")", "if", "key", "not", "in", "self", ".", "_dtypes", ":", "name", "=", "utils", ".", "name", "(", "value", ")", "serializer", "=", "utils", ".", "serializer", "(", "name", ")", "deserializer", "=", "utils", ".", "deserializer", "(", "name", ")", "self", ".", "_dtypes", "[", "key", "]", "=", "(", "name", ",", "serializer", ",", "deserializer", ")", "with", "self", ".", "db", ":", "self", ".", "db", ".", "execute", "(", "\"replace into value_types (key, value_type) values (?, ?)\"", ",", "(", "key", ",", "name", ")", ")", "return", "self", ".", "_dtypes", "[", "key", "]", "[", "1", "]", "(", "value", ")" ]
Get the serialized value for a given key.
[ "Get", "the", "serialized", "value", "for", "a", "given", "key", "." ]
a3ea434f237f039c3cadbc2e0a83ae626d77b818
https://github.com/gsi-upm/soil/blob/a3ea434f237f039c3cadbc2e0a83ae626d77b818/soil/history.py#L92-L103
train
gsi-upm/soil
soil/history.py
History.recover
def recover(self, key, value):
    """Get the deserialized value for a given key, and the serialized version."""
    if key not in self._dtypes:
        self.read_types()
    if key not in self._dtypes:
        raise ValueError("Unknown datatype for {} and {}".format(key, value))
    return self._dtypes[key][2](value)
python
def recover(self, key, value):
    """Get the deserialized value for a given key, and the serialized version."""
    if key not in self._dtypes:
        self.read_types()
    if key not in self._dtypes:
        raise ValueError("Unknown datatype for {} and {}".format(key, value))
    return self._dtypes[key][2](value)
[ "def", "recover", "(", "self", ",", "key", ",", "value", ")", ":", "if", "key", "not", "in", "self", ".", "_dtypes", ":", "self", ".", "read_types", "(", ")", "if", "key", "not", "in", "self", ".", "_dtypes", ":", "raise", "ValueError", "(", "\"Unknown datatype for {} and {}\"", ".", "format", "(", "key", ",", "value", ")", ")", "return", "self", ".", "_dtypes", "[", "key", "]", "[", "2", "]", "(", "value", ")" ]
Get the deserialized value for a given key, and the serialized version.
[ "Get", "the", "deserialized", "value", "for", "a", "given", "key", "and", "the", "serialized", "version", "." ]
a3ea434f237f039c3cadbc2e0a83ae626d77b818
https://github.com/gsi-upm/soil/blob/a3ea434f237f039c3cadbc2e0a83ae626d77b818/soil/history.py#L105-L111
train
gsi-upm/soil
soil/history.py
History.flush_cache
def flush_cache(self):
    '''
    Use a cache to save state changes to avoid opening a session for every change.
    The cache will be flushed at the end of the simulation, and when history is accessed.
    '''
    logger.debug('Flushing cache {}'.format(self.db_path))
    with self.db:
        for rec in self._tups:
            self.db.execute("replace into history(agent_id, t_step, key, value) values (?, ?, ?, ?)", (rec.agent_id, rec.t_step, rec.key, rec.value))
    self._tups = list()
python
def flush_cache(self):
    '''
    Use a cache to save state changes to avoid opening a session for every change.
    The cache will be flushed at the end of the simulation, and when history is accessed.
    '''
    logger.debug('Flushing cache {}'.format(self.db_path))
    with self.db:
        for rec in self._tups:
            self.db.execute("replace into history(agent_id, t_step, key, value) values (?, ?, ?, ?)", (rec.agent_id, rec.t_step, rec.key, rec.value))
    self._tups = list()
[ "def", "flush_cache", "(", "self", ")", ":", "logger", ".", "debug", "(", "'Flushing cache {}'", ".", "format", "(", "self", ".", "db_path", ")", ")", "with", "self", ".", "db", ":", "for", "rec", "in", "self", ".", "_tups", ":", "self", ".", "db", ".", "execute", "(", "\"replace into history(agent_id, t_step, key, value) values (?, ?, ?, ?)\"", ",", "(", "rec", ".", "agent_id", ",", "rec", ".", "t_step", ",", "rec", ".", "key", ",", "rec", ".", "value", ")", ")", "self", ".", "_tups", "=", "list", "(", ")" ]
Use a cache to save state changes to avoid opening a session for every change. The cache will be flushed at the end of the simulation, and when history is accessed.
[ "Use", "a", "cache", "to", "save", "state", "changes", "to", "avoid", "opening", "a", "session", "for", "every", "change", ".", "The", "cache", "will", "be", "flushed", "at", "the", "end", "of", "the", "simulation", "and", "when", "history", "is", "accessed", "." ]
a3ea434f237f039c3cadbc2e0a83ae626d77b818
https://github.com/gsi-upm/soil/blob/a3ea434f237f039c3cadbc2e0a83ae626d77b818/soil/history.py#L114-L123
train
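The record above illustrates a write-behind cache: changes are buffered in memory and written to SQLite in one transaction. Below is a standalone sketch of the same pattern using only the standard sqlite3 module; it is not soil's actual API, just the underlying idea.

import sqlite3

db = sqlite3.connect(':memory:')
db.execute('create table history (agent_id, t_step, key, value)')

_tups = []                                    # in-memory buffer, analogous to History._tups
_tups.append(('agent_1', 0, 'state', 'S'))    # buffered state changes
_tups.append(('agent_1', 1, 'state', 'I'))

with db:                                      # one transaction for the whole buffer
    for rec in _tups:
        db.execute('replace into history(agent_id, t_step, key, value) values (?, ?, ?, ?)', rec)
_tups = []                                    # buffer cleared after flushing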
inveniosoftware/invenio-search-ui
examples/app.py
records
def records(): """Load records.""" import pkg_resources import uuid from dojson.contrib.marc21 import marc21 from dojson.contrib.marc21.utils import create_record, split_blob from invenio_pidstore import current_pidstore from invenio_records.api import Record # pkg resources the demodata data_path = pkg_resources.resource_filename( 'invenio_records', 'data/marc21/bibliographic.xml' ) with open(data_path) as source: indexer = RecordIndexer() with db.session.begin_nested(): for index, data in enumerate(split_blob(source.read()), start=1): # create uuid rec_uuid = uuid.uuid4() # do translate record = marc21.do(create_record(data)) # create PID current_pidstore.minters['recid']( rec_uuid, record ) # create record indexer.index(Record.create(record, id_=rec_uuid)) db.session.commit()
python
def records(): """Load records.""" import pkg_resources import uuid from dojson.contrib.marc21 import marc21 from dojson.contrib.marc21.utils import create_record, split_blob from invenio_pidstore import current_pidstore from invenio_records.api import Record # pkg resources the demodata data_path = pkg_resources.resource_filename( 'invenio_records', 'data/marc21/bibliographic.xml' ) with open(data_path) as source: indexer = RecordIndexer() with db.session.begin_nested(): for index, data in enumerate(split_blob(source.read()), start=1): # create uuid rec_uuid = uuid.uuid4() # do translate record = marc21.do(create_record(data)) # create PID current_pidstore.minters['recid']( rec_uuid, record ) # create record indexer.index(Record.create(record, id_=rec_uuid)) db.session.commit()
[ "def", "records", "(", ")", ":", "import", "pkg_resources", "import", "uuid", "from", "dojson", ".", "contrib", ".", "marc21", "import", "marc21", "from", "dojson", ".", "contrib", ".", "marc21", ".", "utils", "import", "create_record", ",", "split_blob", "from", "invenio_pidstore", "import", "current_pidstore", "from", "invenio_records", ".", "api", "import", "Record", "data_path", "=", "pkg_resources", ".", "resource_filename", "(", "'invenio_records'", ",", "'data/marc21/bibliographic.xml'", ")", "with", "open", "(", "data_path", ")", "as", "source", ":", "indexer", "=", "RecordIndexer", "(", ")", "with", "db", ".", "session", ".", "begin_nested", "(", ")", ":", "for", "index", ",", "data", "in", "enumerate", "(", "split_blob", "(", "source", ".", "read", "(", ")", ")", ",", "start", "=", "1", ")", ":", "rec_uuid", "=", "uuid", ".", "uuid4", "(", ")", "record", "=", "marc21", ".", "do", "(", "create_record", "(", "data", ")", ")", "current_pidstore", ".", "minters", "[", "'recid'", "]", "(", "rec_uuid", ",", "record", ")", "indexer", ".", "index", "(", "Record", ".", "create", "(", "record", ",", "id_", "=", "rec_uuid", ")", ")", "db", ".", "session", ".", "commit", "(", ")" ]
Load records.
[ "Load", "records", "." ]
4b61737f938cbfdc1aad6602a73f3a24d53b3312
https://github.com/inveniosoftware/invenio-search-ui/blob/4b61737f938cbfdc1aad6602a73f3a24d53b3312/examples/app.py#L206-L233
train
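The core of the loader above is the dojson MARC21 translation followed by indexing. A trimmed sketch of just the translation step is shown below; the file name is hypothetical, the Invenio indexing and PID minting are omitted, and the 'title_statement' key is only an example of what the translated dict may contain.

from dojson.contrib.marc21 import marc21
from dojson.contrib.marc21.utils import create_record, split_blob

with open('bibliographic.xml') as source:        # hypothetical local MARC21 XML file
    for data in split_blob(source.read()):       # one blob per MARC record
        record = marc21.do(create_record(data))  # translate MARC21 into a JSON-like dict
        print(record.get('title_statement'))     # e.g. the 245 field, if present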
gsi-upm/soil
soil/simulation.py
Simulation.run_trial_exceptions
def run_trial_exceptions(self, *args, **kwargs):
    '''
    A wrapper for run_trial that catches exceptions and returns them.
    It is meant for async simulations
    '''
    try:
        return self.run_trial(*args, **kwargs)
    except Exception as ex:
        c = ex.__cause__
        c.message = ''.join(traceback.format_exception(type(c), c, c.__traceback__)[:])
        return c
python
def run_trial_exceptions(self, *args, **kwargs):
    '''
    A wrapper for run_trial that catches exceptions and returns them.
    It is meant for async simulations
    '''
    try:
        return self.run_trial(*args, **kwargs)
    except Exception as ex:
        c = ex.__cause__
        c.message = ''.join(traceback.format_exception(type(c), c, c.__traceback__)[:])
        return c
[ "def", "run_trial_exceptions", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "try", ":", "return", "self", ".", "run_trial", "(", "*", "args", ",", "**", "kwargs", ")", "except", "Exception", "as", "ex", ":", "c", "=", "ex", ".", "__cause__", "c", ".", "message", "=", "''", ".", "join", "(", "traceback", ".", "format_exception", "(", "type", "(", "c", ")", ",", "c", ",", "c", ".", "__traceback__", ")", "[", ":", "]", ")", "return", "c" ]
A wrapper for run_trial that catches exceptions and returns them. It is meant for async simulations
[ "A", "wrapper", "for", "run_trial", "that", "catches", "exceptions", "and", "returns", "them", ".", "It", "is", "meant", "for", "async", "simulations" ]
a3ea434f237f039c3cadbc2e0a83ae626d77b818
https://github.com/gsi-upm/soil/blob/a3ea434f237f039c3cadbc2e0a83ae626d77b818/soil/simulation.py#L196-L206
train
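A hedged sketch of how results from this wrapper are meant to be consumed: trials that failed come back as exception objects carrying a formatted traceback in .message. The 'results' list and the simulated failure below are stand-ins so the snippet runs on its own; they are not soil's actual trial-dispatch code.

import traceback

results = []  # would normally hold the return values of run_trial_exceptions calls

# Simulate one failed trial so the handling below is runnable in isolation.
try:
    raise RuntimeError('simulated trial failure')
except RuntimeError as ex:
    ex.message = ''.join(traceback.format_exception(type(ex), ex, ex.__traceback__))
    results.append(ex)

for res in results:
    if isinstance(res, Exception):
        print('Trial failed:\n' + res.message)   # formatted traceback, as stored by the wrapper
    else:
        print('Trial succeeded:', res)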
pr-omethe-us/PyKED
pyked/orcid.py
search_orcid
def search_orcid(orcid):
    """
    Search the ORCID public API

    Specfically, return a dictionary with the personal details (name, etc.)
    of the person associated with the given ORCID

    Args:
        orcid (`str`): The ORCID to be searched

    Returns:
        `dict`: Dictionary with the JSON response from the API

    Raises:
        `~requests.HTTPError`: If the given ORCID cannot be found, an
            `~requests.HTTPError` is raised with status code 404
    """
    url = 'https://pub.orcid.org/v2.1/{orcid}/person'.format(orcid=orcid)
    r = requests.get(url, headers=headers)
    if r.status_code != 200:
        r.raise_for_status()
    return r.json()
python
def search_orcid(orcid):
    """
    Search the ORCID public API

    Specfically, return a dictionary with the personal details (name, etc.)
    of the person associated with the given ORCID

    Args:
        orcid (`str`): The ORCID to be searched

    Returns:
        `dict`: Dictionary with the JSON response from the API

    Raises:
        `~requests.HTTPError`: If the given ORCID cannot be found, an
            `~requests.HTTPError` is raised with status code 404
    """
    url = 'https://pub.orcid.org/v2.1/{orcid}/person'.format(orcid=orcid)
    r = requests.get(url, headers=headers)
    if r.status_code != 200:
        r.raise_for_status()
    return r.json()
[ "def", "search_orcid", "(", "orcid", ")", ":", "url", "=", "'https://pub.orcid.org/v2.1/{orcid}/person'", ".", "format", "(", "orcid", "=", "orcid", ")", "r", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "headers", ")", "if", "r", ".", "status_code", "!=", "200", ":", "r", ".", "raise_for_status", "(", ")", "return", "r", ".", "json", "(", ")" ]
Search the ORCID public API

Specifically, return a dictionary with the personal details (name, etc.)
of the person associated with the given ORCID

Args:
    orcid (`str`): The ORCID to be searched

Returns:
    `dict`: Dictionary with the JSON response from the API

Raises:
    `~requests.HTTPError`: If the given ORCID cannot be found, an
        `~requests.HTTPError` is raised with status code 404
[ "Search", "the", "ORCID", "public", "API" ]
d9341a068c1099049a3f1de41c512591f342bf64
https://github.com/pr-omethe-us/PyKED/blob/d9341a068c1099049a3f1de41c512591f342bf64/pyked/orcid.py#L8-L29
train
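A usage sketch for search_orcid. It assumes PyKED is installed and network access is available; the ORCID iD below is illustrative only, and the keys of the returned dictionary are simply listed rather than assumed.

import requests
from pyked.orcid import search_orcid

try:
    person = search_orcid('0000-0002-1825-0097')   # illustrative ORCID iD
    print(sorted(person.keys()))                   # inspect what the /person endpoint returned
except requests.HTTPError as err:
    print('ORCID lookup failed:', err)             # raised for a 404 (unknown ORCID), per the docstring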
Genida/django-meerkat
src/meerkat/utils/time.py
daterange
def daterange(start_date, end_date):
    """
    Yield one date per day from starting date to ending date.

    Args:
        start_date (date): starting date.
        end_date (date): ending date.

    Yields:
        date: a date for each day within the range.
    """
    for n in range(int((end_date - start_date).days)):
        yield start_date + timedelta(n)
python
def daterange(start_date, end_date):
    """
    Yield one date per day from starting date to ending date.

    Args:
        start_date (date): starting date.
        end_date (date): ending date.

    Yields:
        date: a date for each day within the range.
    """
    for n in range(int((end_date - start_date).days)):
        yield start_date + timedelta(n)
[ "def", "daterange", "(", "start_date", ",", "end_date", ")", ":", "for", "n", "in", "range", "(", "int", "(", "(", "end_date", "-", "start_date", ")", ".", "days", ")", ")", ":", "yield", "start_date", "+", "timedelta", "(", "n", ")" ]
Yield one date per day from starting date to ending date.

Args:
    start_date (date): starting date.
    end_date (date): ending date.

Yields:
    date: a date for each day within the range.
[ "Yield", "one", "date", "per", "day", "from", "starting", "date", "to", "ending", "date", "." ]
486502a75bb0800266db785fd32717d8c0eb8deb
https://github.com/Genida/django-meerkat/blob/486502a75bb0800266db785fd32717d8c0eb8deb/src/meerkat/utils/time.py#L21-L33
train
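A small, self-contained usage example for daterange; the function body is copied from the record above so the snippet runs without installing django-meerkat.

from datetime import date, timedelta

def daterange(start_date, end_date):
    for n in range(int((end_date - start_date).days)):
        yield start_date + timedelta(n)

for day in daterange(date(2024, 1, 1), date(2024, 1, 4)):
    print(day)   # 2024-01-01, 2024-01-02, 2024-01-03 -- the end date is excluded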