Dataset columns (name: type, value/length range):

id: int32 (0 to 252k)
repo: string (lengths 7 to 55)
path: string (lengths 4 to 127)
func_name: string (lengths 1 to 88)
original_string: string (lengths 75 to 19.8k)
language: string (1 distinct value)
code: string (lengths 75 to 19.8k)
code_tokens: sequence
docstring: string (lengths 3 to 17.3k)
docstring_tokens: sequence
sha: string (lengths 40 to 40)
url: string (lengths 87 to 242)
251,300
edeposit/edeposit.amqp.storage
src/edeposit/amqp/storage/web_tools.py
compose_tree_path
def compose_tree_path(tree, issn=False):
    """
    Compose absolute path for given `tree`.

    Args:
        pub (obj): :class:`.Tree` instance.
        issn (bool, default False): Compose URL using ISSN.

    Returns:
        str: Absolute path of the tree, without server's address and protocol.
    """
    if issn:
        return join(
            "/",
            ISSN_DOWNLOAD_KEY,
            basename(tree.issn)
        )

    return join(
        "/",
        PATH_DOWNLOAD_KEY,
        quote_plus(tree.path).replace("%2F", "/"),
    )
python
[ "def", "compose_tree_path", "(", "tree", ",", "issn", "=", "False", ")", ":", "if", "issn", ":", "return", "join", "(", "\"/\"", ",", "ISSN_DOWNLOAD_KEY", ",", "basename", "(", "tree", ".", "issn", ")", ")", "return", "join", "(", "\"/\"", ",", "PATH_DOWNLOAD_KEY", ",", "quote_plus", "(", "tree", ".", "path", ")", ".", "replace", "(", "\"%2F\"", ",", "\"/\"", ")", ",", ")" ]
Compose absolute path for given `tree`. Args: pub (obj): :class:`.Tree` instance. issn (bool, default False): Compose URL using ISSN. Returns: str: Absolute path of the tree, without server's address and protocol.
[ "Compose", "absolute", "path", "for", "given", "tree", "." ]
fb6bd326249847de04b17b64e856c878665cea92
https://github.com/edeposit/edeposit.amqp.storage/blob/fb6bd326249847de04b17b64e856c878665cea92/src/edeposit/amqp/storage/web_tools.py#L61-L83
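A quick way to see what the record above computes is to run the path logic on its own. The sketch below is a Python 3 adaptation with stand-in values: the Tree namedtuple and the PATH_DOWNLOAD_KEY / ISSN_DOWNLOAD_KEY constants are invented here, not taken from the edeposit package.

from collections import namedtuple
from os.path import join, basename
from urllib.parse import quote_plus

# Stand-ins for the module-level constants used by compose_tree_path.
PATH_DOWNLOAD_KEY = "by_path"
ISSN_DOWNLOAD_KEY = "by_issn"

Tree = namedtuple("Tree", "path issn")  # hypothetical stand-in for .Tree

def compose_tree_path(tree, issn=False):
    if issn:
        return join("/", ISSN_DOWNLOAD_KEY, basename(tree.issn))
    return join("/", PATH_DOWNLOAD_KEY, quote_plus(tree.path).replace("%2F", "/"))

tree = Tree(path="periodical/1234-5678/2015", issn="1234-5678")
print(compose_tree_path(tree))             # /by_path/periodical/1234-5678/2015
print(compose_tree_path(tree, issn=True))  # /by_issn/1234-5678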
251,301
edeposit/edeposit.amqp.storage
src/edeposit/amqp/storage/web_tools.py
compose_full_url
def compose_full_url(pub, uuid_url=False):
    """
    Compose full url for given `pub`, with protocol, server's address and port.

    Args:
        pub (obj): :class:`.DBPublication` instance.
        uuid_url (bool, default False): Compose URL using UUID.

    Returns:
        str: Absolute url of the publication.

    Raises:
        PrivatePublicationError: When the `pub` is private publication.
    """
    url = compose_path(pub, uuid_url)

    if WEB_PORT == 80:
        return "%s://%s%s" % (_PROTOCOL, WEB_ADDR, url)

    return "%s://%s:%d%s" % (_PROTOCOL, WEB_ADDR, WEB_PORT, url)
python
[ "def", "compose_full_url", "(", "pub", ",", "uuid_url", "=", "False", ")", ":", "url", "=", "compose_path", "(", "pub", ",", "uuid_url", ")", "if", "WEB_PORT", "==", "80", ":", "return", "\"%s://%s%s\"", "%", "(", "_PROTOCOL", ",", "WEB_ADDR", ",", "url", ")", "return", "\"%s://%s:%d%s\"", "%", "(", "_PROTOCOL", ",", "WEB_ADDR", ",", "WEB_PORT", ",", "url", ")" ]
Compose full url for given `pub`, with protocol, server's address and port. Args: pub (obj): :class:`.DBPublication` instance. uuid_url (bool, default False): Compose URL using UUID. Returns: str: Absolute url of the publication. Raises: PrivatePublicationError: When the `pub` is private publication.
[ "Compose", "full", "url", "for", "given", "pub", "with", "protocol", "server", "s", "address", "and", "port", "." ]
fb6bd326249847de04b17b64e856c878665cea92
https://github.com/edeposit/edeposit.amqp.storage/blob/fb6bd326249847de04b17b64e856c878665cea92/src/edeposit/amqp/storage/web_tools.py#L86-L104
251,302
edeposit/edeposit.amqp.storage
src/edeposit/amqp/storage/web_tools.py
compose_tree_url
def compose_tree_url(tree, issn_url=False):
    """
    Compose full url for given `tree`, with protocol, server's address and port.

    Args:
        tree (obj): :class:`.Tree` instance.
        issn_url (bool, default False): Compose URL using ISSN.

    Returns:
        str: URL of the tree
    """
    url = compose_tree_path(tree, issn_url)

    if WEB_PORT == 80:
        return "%s://%s%s" % (_PROTOCOL, WEB_ADDR, url)

    return "%s://%s:%d%s" % (_PROTOCOL, WEB_ADDR, WEB_PORT, url)
python
[ "def", "compose_tree_url", "(", "tree", ",", "issn_url", "=", "False", ")", ":", "url", "=", "compose_tree_path", "(", "tree", ",", "issn_url", ")", "if", "WEB_PORT", "==", "80", ":", "return", "\"%s://%s%s\"", "%", "(", "_PROTOCOL", ",", "WEB_ADDR", ",", "url", ")", "return", "\"%s://%s:%d%s\"", "%", "(", "_PROTOCOL", ",", "WEB_ADDR", ",", "WEB_PORT", ",", "url", ")" ]
Compose full url for given `tree`, with protocol, server's address and port. Args: tree (obj): :class:`.Tree` instance. issn_url (bool, default False): Compose URL using ISSN. Returns: str: URL of the tree
[ "Compose", "full", "url", "for", "given", "tree", "with", "protocol", "server", "s", "address", "and", "port", "." ]
fb6bd326249847de04b17b64e856c878665cea92
https://github.com/edeposit/edeposit.amqp.storage/blob/fb6bd326249847de04b17b64e856c878665cea92/src/edeposit/amqp/storage/web_tools.py#L107-L124
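compose_full_url and compose_tree_url in the two records above share the same host-and-port assembly. A self-contained illustration of that branch, with made-up _PROTOCOL and WEB_ADDR values (the real ones come from the package settings):

_PROTOCOL = "http"       # assumed; configured elsewhere in the real module
WEB_ADDR = "example.org"

def assemble(url, web_port):
    # Port 80 is implied by http, so it is left out of the URL.
    if web_port == 80:
        return "%s://%s%s" % (_PROTOCOL, WEB_ADDR, url)
    return "%s://%s:%d%s" % (_PROTOCOL, WEB_ADDR, web_port, url)

print(assemble("/by_issn/1234-5678", 80))    # http://example.org/by_issn/1234-5678
print(assemble("/by_issn/1234-5678", 8080))  # http://example.org:8080/by_issn/1234-5678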
251,303
eallik/spinoff
spinoff/util/meta.py
profile
def profile(func):
    """ Simple profile decorator, monitors method execution time """
    @inlineCallbacks
    def callme(*args, **kwargs):
        start = time.time()
        ret = yield func(*args, **kwargs)
        time_to_execute = time.time() - start
        log.msg('%s executed in %.3f seconds' % (func.__name__, time_to_execute))
        returnValue(ret)
    return callme
python
[ "def", "profile", "(", "func", ")", ":", "@", "inlineCallbacks", "def", "callme", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "start", "=", "time", ".", "time", "(", ")", "ret", "=", "yield", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "time_to_execute", "=", "time", ".", "time", "(", ")", "-", "start", "log", ".", "msg", "(", "'%s executed in %.3f seconds'", "%", "(", "func", ".", "__name__", ",", "time_to_execute", ")", ")", "returnValue", "(", "ret", ")", "return", "callme" ]
Simple profile decorator, monitors method execution time
[ "Simple", "profile", "decorator", "monitors", "method", "execution", "time" ]
06b00d6b86c7422c9cb8f9a4b2915906e92b7d52
https://github.com/eallik/spinoff/blob/06b00d6b86c7422c9cb8f9a4b2915906e92b7d52/spinoff/util/meta.py#L9-L20
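The profile decorator above depends on Twisted (inlineCallbacks, returnValue, log.msg). As a rough, non-Twisted analogue of the same timing pattern, something like the following sketch could be used; it is an illustration, not part of spinoff:

import functools
import time

def profile_sync(func):
    """Log how long func takes; a synchronous analogue of profile()."""
    @functools.wraps(func)
    def callme(*args, **kwargs):
        start = time.time()
        ret = func(*args, **kwargs)
        print('%s executed in %.3f seconds' % (func.__name__, time.time() - start))
        return ret
    return callme

@profile_sync
def slow_add(a, b):
    time.sleep(0.1)
    return a + b

print(slow_add(1, 2))  # prints the timing line, then 3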
251,304
stain/forgetSQL
lib/forgetSQL.py
prepareClasses
def prepareClasses(locals):
    """Fix _userClasses and some stuff in classes.

    Traverses locals, which is a locals() dictionary from the
    namespace where Forgetter subclasses have been defined, and
    resolves names in _userClasses to real class-references.

    Normally you would call forgettSQL.prepareClasses(locals()) after
    defining all classes in your local module. prepareClasses will
    only touch objects in the name space that is a subclassed of
    Forgetter.
    """
    for (name, forgetter) in locals.items():
        if not (type(forgetter) is types.TypeType and
                issubclass(forgetter, Forgetter)):
            # Only care about Forgetter objects
            continue
        # Resolve classes
        for (key, userclass) in forgetter._userClasses.items():
            if type(userclass) is types.StringType:
                # resolve from locals
                resolved = locals[userclass]
                forgetter._userClasses[key] = resolved
        forgetter._tables = {}
        # Update all fields with proper names
        for (field, sqlfield) in forgetter._sqlFields.items():
            forgetter._sqlFields[field] = forgetter._checkTable(sqlfield)
        newLinks = []
        for linkpair in forgetter._sqlLinks:
            (link1, link2) = linkpair
            link1 = forgetter._checkTable(link1)
            link2 = forgetter._checkTable(link2)
            newLinks.append((link1, link2))
        forgetter._sqlLinks = newLinks
        forgetter._prepared = True
python
[ "def", "prepareClasses", "(", "locals", ")", ":", "for", "(", "name", ",", "forgetter", ")", "in", "locals", ".", "items", "(", ")", ":", "if", "not", "(", "type", "(", "forgetter", ")", "is", "types", ".", "TypeType", "and", "issubclass", "(", "forgetter", ",", "Forgetter", ")", ")", ":", "# Only care about Forgetter objects", "continue", "# Resolve classes", "for", "(", "key", ",", "userclass", ")", "in", "forgetter", ".", "_userClasses", ".", "items", "(", ")", ":", "if", "type", "(", "userclass", ")", "is", "types", ".", "StringType", ":", "# resolve from locals", "resolved", "=", "locals", "[", "userclass", "]", "forgetter", ".", "_userClasses", "[", "key", "]", "=", "resolved", "forgetter", ".", "_tables", "=", "{", "}", "# Update all fields with proper names", "for", "(", "field", ",", "sqlfield", ")", "in", "forgetter", ".", "_sqlFields", ".", "items", "(", ")", ":", "forgetter", ".", "_sqlFields", "[", "field", "]", "=", "forgetter", ".", "_checkTable", "(", "sqlfield", ")", "newLinks", "=", "[", "]", "for", "linkpair", "in", "forgetter", ".", "_sqlLinks", ":", "(", "link1", ",", "link2", ")", "=", "linkpair", "link1", "=", "forgetter", ".", "_checkTable", "(", "link1", ")", "link2", "=", "forgetter", ".", "_checkTable", "(", "link2", ")", "newLinks", ".", "append", "(", "(", "link1", ",", "link2", ")", ")", "forgetter", ".", "_sqlLinks", "=", "newLinks", "forgetter", ".", "_prepared", "=", "True" ]
Fix _userClasses and some stuff in classes. Traverses locals, which is a locals() dictionary from the namespace where Forgetter subclasses have been defined, and resolves names in _userClasses to real class-references. Normally you would call forgettSQL.prepareClasses(locals()) after defining all classes in your local module. prepareClasses will only touch objects in the name space that is a subclassed of Forgetter.
[ "Fix", "_userClasses", "and", "some", "stuff", "in", "classes", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L959-L998
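The core trick in prepareClasses is resolving string entries in _userClasses against the caller's namespace. A stripped-down Python 3 analogue of just that resolution step, with invented Author/Book classes standing in for Forgetter subclasses:

class Author:
    _userClasses = {}

class Book:
    # Forward reference by name, because Author and Book may be defined in any order.
    _userClasses = {'author': 'Author'}

def prepare(namespace):
    for obj in namespace.values():
        if not (isinstance(obj, type) and hasattr(obj, '_userClasses')):
            continue
        for key, userclass in obj._userClasses.items():
            if isinstance(userclass, str):       # a name: resolve it from the namespace
                obj._userClasses[key] = namespace[userclass]

prepare({'Author': Author, 'Book': Book})
print(Book._userClasses['author'] is Author)     # True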
251,305
stain/forgetSQL
lib/forgetSQL.py
Forgetter._setID
def _setID(self, id):
    """Set the ID, ie. the values for primary keys.

    id can be either a list, following the _sqlPrimary, or some
    other type, that will be set as the singleton ID (requires
    1-length sqlPrimary).
    """
    if type(id) in (types.ListType, types.TupleType):
        try:
            for key in self._sqlPrimary:
                value = id[0]
                self.__dict__[key] = value
                id = id[1:]  # rest, go revursive
        except IndexError:
            raise 'Not enough id fields, required: %s' % len(self._sqlPrimary)
    elif len(self._sqlPrimary) <= 1:
        # It's a simple value
        key = self._sqlPrimary[0]
        self.__dict__[key] = id
    else:
        raise 'Not enough id fields, required: %s' % len(self._sqlPrimary)
    self._new = False
python
[ "def", "_setID", "(", "self", ",", "id", ")", ":", "if", "type", "(", "id", ")", "in", "(", "types", ".", "ListType", ",", "types", ".", "TupleType", ")", ":", "try", ":", "for", "key", "in", "self", ".", "_sqlPrimary", ":", "value", "=", "id", "[", "0", "]", "self", ".", "__dict__", "[", "key", "]", "=", "value", "id", "=", "id", "[", "1", ":", "]", "# rest, go revursive", "except", "IndexError", ":", "raise", "'Not enough id fields, required: %s'", "%", "len", "(", "self", ".", "_sqlPrimary", ")", "elif", "len", "(", "self", ".", "_sqlPrimary", ")", "<=", "1", ":", "# It's a simple value", "key", "=", "self", ".", "_sqlPrimary", "[", "0", "]", "self", ".", "__dict__", "[", "key", "]", "=", "id", "else", ":", "raise", "'Not enough id fields, required: %s'", "%", "len", "(", "self", ".", "_sqlPrimary", ")", "self", ".", "_new", "=", "False" ]
Set the ID, ie. the values for primary keys. id can be either a list, following the _sqlPrimary, or some other type, that will be set as the singleton ID (requires 1-length sqlPrimary).
[ "Set", "the", "ID", "ie", ".", "the", "values", "for", "primary", "keys", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L279-L300
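The id-distribution logic in _setID can be sketched without the Forgetter machinery. The snippet below is a simplified illustration that checks the length up front instead of consuming the sequence item by item as the original does; the field names are invented:

sqlPrimary = ["host", "port"]          # invented composite primary key
values = {}

def set_id(id_):
    # Distribute a sequence of values over the primary-key fields, in order;
    # a bare value is only accepted when there is a single primary key.
    if isinstance(id_, (list, tuple)):
        if len(id_) < len(sqlPrimary):
            raise ValueError('Not enough id fields, required: %s' % len(sqlPrimary))
        for key, value in zip(sqlPrimary, id_):
            values[key] = value
    elif len(sqlPrimary) <= 1:
        values[sqlPrimary[0]] = id_
    else:
        raise ValueError('Not enough id fields, required: %s' % len(sqlPrimary))

set_id(("example.org", 8080))
print(values)   # {'host': 'example.org', 'port': 8080}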
251,306
stain/forgetSQL
lib/forgetSQL.py
Forgetter._getID
def _getID(self):
    """Get the ID values as a tuple annotated by sqlPrimary"""
    id = []
    for key in self._sqlPrimary:
        value = self.__dict__[key]
        if isinstance(value, Forgetter):
            # It's another object, we store only the ID
            if value._new:
                # It's a new object too, it must be saved!
                value.save()
            try:
                (value,) = value._getID()
            except:
                raise "Unsupported: Part %s of %s primary key is a reference to %s, with multiple-primary-key %s " % (key, self.__class__, value.__class__, value)
        id.append(value)
    return id
python
[ "def", "_getID", "(", "self", ")", ":", "id", "=", "[", "]", "for", "key", "in", "self", ".", "_sqlPrimary", ":", "value", "=", "self", ".", "__dict__", "[", "key", "]", "if", "isinstance", "(", "value", ",", "Forgetter", ")", ":", "# It's another object, we store only the ID", "if", "value", ".", "_new", ":", "# It's a new object too, it must be saved!", "value", ".", "save", "(", ")", "try", ":", "(", "value", ",", ")", "=", "value", ".", "_getID", "(", ")", "except", ":", "raise", "\"Unsupported: Part %s of %s primary key is a reference to %s, with multiple-primary-key %s \"", "%", "(", "key", ",", "self", ".", "__class__", ",", "value", ".", "__class__", ",", "value", ")", "id", ".", "append", "(", "value", ")", "return", "id" ]
Get the ID values as a tuple annotated by sqlPrimary
[ "Get", "the", "ID", "values", "as", "a", "tuple", "annotated", "by", "sqlPrimary" ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L302-L317
251,307
stain/forgetSQL
lib/forgetSQL.py
Forgetter._resetID
def _resetID(self):
    """Reset all ID fields."""
    # Dirty.. .=))
    self._setID((None,) * len(self._sqlPrimary))
    self._new = True
python
[ "def", "_resetID", "(", "self", ")", ":", "# Dirty.. .=))", "self", ".", "_setID", "(", "(", "None", ",", ")", "*", "len", "(", "self", ".", "_sqlPrimary", ")", ")", "self", ".", "_new", "=", "True" ]
Reset all ID fields.
[ "Reset", "all", "ID", "fields", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L319-L323
251,308
stain/forgetSQL
lib/forgetSQL.py
Forgetter._checkTable
def _checkTable(cls, field):
    """Split a field from _sqlFields into table, column.

    Registers the table in cls._tables, and returns a fully
    qualified table.column (default table: cls._sqlTable)
    """
    # Get table part
    try:
        (table, field) = field.split('.')
    except ValueError:
        table = cls._sqlTable
    # clean away white space
    table = table.strip()
    field = field.strip()
    # register table
    cls._tables[table] = None
    # and return in proper shape
    return table + '.' + field
python
[ "def", "_checkTable", "(", "cls", ",", "field", ")", ":", "# Get table part", "try", ":", "(", "table", ",", "field", ")", "=", "field", ".", "split", "(", "'.'", ")", "except", "ValueError", ":", "table", "=", "cls", ".", "_sqlTable", "# clean away white space", "table", "=", "table", ".", "strip", "(", ")", "field", "=", "field", ".", "strip", "(", ")", "# register table", "cls", ".", "_tables", "[", "table", "]", "=", "None", "# and return in proper shape", "return", "table", "+", "'.'", "+", "field" ]
Split a field from _sqlFields into table, column. Registers the table in cls._tables, and returns a fully qualified table.column (default table: cls._sqlTable)
[ "Split", "a", "field", "from", "_sqlFields", "into", "table", "column", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L381-L398
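The table.column splitting done by _checkTable is easy to exercise in isolation. In this sketch, _sqlTable and _tables are plain module-level stand-ins rather than Forgetter class attributes:

_sqlTable = "person"   # default table, as it would appear on a Forgetter subclass
_tables = {}           # registry of tables seen so far

def check_table(field):
    try:
        table, field = field.split('.')
    except ValueError:          # no dot: fall back to the default table
        table = _sqlTable
    table, field = table.strip(), field.strip()
    _tables[table] = None
    return table + '.' + field

print(check_table("name"))            # person.name
print(check_table("address.street"))  # address.street
print(sorted(_tables))                # ['address', 'person']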
251,309
stain/forgetSQL
lib/forgetSQL.py
Forgetter.reset
def reset(self):
    """Reset all fields, almost like creating a new object.

    Note: Forgets changes you have made not saved to database!
    (Remember: Others might reference the object already,
    expecting something else!)

    Override this method if you add properties not defined in
    _sqlFields.
    """
    self._resetID()
    self._new = None
    self._updated = None
    self._changed = None
    self._values = {}
    # initially create fields
    for field in self._sqlFields.keys():
        self._values[field] = None
python
[ "def", "reset", "(", "self", ")", ":", "self", ".", "_resetID", "(", ")", "self", ".", "_new", "=", "None", "self", ".", "_updated", "=", "None", "self", ".", "_changed", "=", "None", "self", ".", "_values", "=", "{", "}", "# initially create fields", "for", "field", "in", "self", ".", "_sqlFields", ".", "keys", "(", ")", ":", "self", ".", "_values", "[", "field", "]", "=", "None" ]
Reset all fields, almost like creating a new object. Note: Forgets changes you have made not saved to database! (Remember: Others might reference the object already, expecting something else!) Override this method if you add properties not defined in _sqlFields.
[ "Reset", "all", "fields", "almost", "like", "creating", "a", "new", "object", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L402-L417
251,310
stain/forgetSQL
lib/forgetSQL.py
Forgetter.load
def load(self, id=None):
    """Load from database. Old values will be discarded."""
    if id is not None:
        # We are asked to change our ID to something else
        self.reset()
        self._setID(id)
    if not self._new and self._validID():
        self._loadDB()
        self._updated = time.time()
python
[ "def", "load", "(", "self", ",", "id", "=", "None", ")", ":", "if", "id", "is", "not", "None", ":", "# We are asked to change our ID to something else", "self", ".", "reset", "(", ")", "self", ".", "_setID", "(", "id", ")", "if", "not", "self", ".", "_new", "and", "self", ".", "_validID", "(", ")", ":", "self", ".", "_loadDB", "(", ")", "self", ".", "_updated", "=", "time", ".", "time", "(", ")" ]
Load from database. Old values will be discarded.
[ "Load", "from", "database", ".", "Old", "values", "will", "be", "discarded", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L419-L427
251,311
stain/forgetSQL
lib/forgetSQL.py
Forgetter.save
def save(self):
    """Save to database if anything has changed since last load"""
    if (self._new or
        (self._validID() and self._changed) or
        (self._updated and self._changed > self._updated)):
        # Don't save if we have not loaded existing data!
        self._saveDB()
        return True
    return False
python
[ "def", "save", "(", "self", ")", ":", "if", "(", "self", ".", "_new", "or", "(", "self", ".", "_validID", "(", ")", "and", "self", ".", "_changed", ")", "or", "(", "self", ".", "_updated", "and", "self", ".", "_changed", ">", "self", ".", "_updated", ")", ")", ":", "# Don't save if we have not loaded existing data!", "self", ".", "_saveDB", "(", ")", "return", "True", "return", "False" ]
Save to database if anything has changed since last load
[ "Save", "to", "database", "if", "anything", "has", "changed", "since", "last", "load" ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L429-L437
251,312
stain/forgetSQL
lib/forgetSQL.py
Forgetter.delete
def delete(self):
    """Mark this object for deletion in the database.

    The object will then be reset and ready for use again with a
    new id.
    """
    (sql, ) = self._prepareSQL("DELETE")
    curs = self.cursor()
    curs.execute(sql, self._getID())
    curs.close()
    self.reset()
python
[ "def", "delete", "(", "self", ")", ":", "(", "sql", ",", ")", "=", "self", ".", "_prepareSQL", "(", "\"DELETE\"", ")", "curs", "=", "self", ".", "cursor", "(", ")", "curs", ".", "execute", "(", "sql", ",", "self", ".", "_getID", "(", ")", ")", "curs", ".", "close", "(", ")", "self", ".", "reset", "(", ")" ]
Mark this object for deletion in the database. The object will then be reset and ready for use again with a new id.
[ "Mark", "this", "object", "for", "deletion", "in", "the", "database", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L439-L449
251,313
stain/forgetSQL
lib/forgetSQL.py
Forgetter._nextSequence
def _nextSequence(cls, name=None):
    """Return a new sequence number for insertion in self._sqlTable.

    Note that if your sequences are not named
    tablename_primarykey_seq (ie. for table 'blapp' with primary
    key 'john_id', sequence name blapp_john_id_seq) you must give
    the full sequence name as an optional argument to _nextSequence)
    """
    if not name:
        name = cls._sqlSequence
    if not name:
        # Assume it's tablename_primarykey_seq
        if len(cls._sqlPrimary) <> 1:
            raise "Could not guess sequence name for multi-primary-key"
        primary = cls._sqlPrimary[0]
        name = '%s_%s_seq' % (cls._sqlTable, primary.replace('.','_'))
        # Don't have . as a tablename or column name! =)
    curs = cls.cursor()
    curs.execute("SELECT nextval('%s')" % name)
    value = curs.fetchone()[0]
    curs.close()
    return value
python
[ "def", "_nextSequence", "(", "cls", ",", "name", "=", "None", ")", ":", "if", "not", "name", ":", "name", "=", "cls", ".", "_sqlSequence", "if", "not", "name", ":", "# Assume it's tablename_primarykey_seq", "if", "len", "(", "cls", ".", "_sqlPrimary", ")", "<>", "1", ":", "raise", "\"Could not guess sequence name for multi-primary-key\"", "primary", "=", "cls", ".", "_sqlPrimary", "[", "0", "]", "name", "=", "'%s_%s_seq'", "%", "(", "cls", ".", "_sqlTable", ",", "primary", ".", "replace", "(", "'.'", ",", "'_'", ")", ")", "# Don't have . as a tablename or column name! =)", "curs", "=", "cls", ".", "cursor", "(", ")", "curs", ".", "execute", "(", "\"SELECT nextval('%s')\"", "%", "name", ")", "value", "=", "curs", ".", "fetchone", "(", ")", "[", "0", "]", "curs", ".", "close", "(", ")", "return", "value" ]
Return a new sequence number for insertion in self._sqlTable. Note that if your sequences are not named tablename_primarykey_seq (ie. for table 'blapp' with primary key 'john_id', sequence name blapp_john_id_seq) you must give the full sequence name as an optional argument to _nextSequence)
[ "Return", "a", "new", "sequence", "number", "for", "insertion", "in", "self", ".", "_sqlTable", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L608-L629
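The sequence-name convention described in the docstring above (tablename_primarykey_seq) can be shown without a database connection; this helper is a hypothetical extraction of the naming fallback only:

def guess_sequence_name(sql_table, sql_primary):
    # Mirrors the fallback in _nextSequence: table name plus primary key plus "_seq",
    # with dots in the key replaced so the result stays a single identifier.
    if len(sql_primary) != 1:
        raise ValueError("Could not guess sequence name for multi-primary-key")
    return '%s_%s_seq' % (sql_table, sql_primary[0].replace('.', '_'))

print(guess_sequence_name('blapp', ['john_id']))        # blapp_john_id_seq
print(guess_sequence_name('blapp', ['blapp.john_id']))  # blapp_blapp_john_id_seq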
251,314
stain/forgetSQL
lib/forgetSQL.py
Forgetter._loadFromRow
def _loadFromRow(self, result, fields, cursor):
    """Load from a database row, described by fields.

    ``fields`` should be the attribute names that will be set.
    Note that userclasses will be created (but not loaded).
    """
    position = 0
    for elem in fields:
        value = result[position]
        valueType = cursor.description[position][1]
        if hasattr(self._dbModule, 'BOOLEAN') and \
           valueType == self._dbModule.BOOLEAN and \
           (value is not True or value is not False):
            # convert to a python boolean
            value = value and True or False
        if value and self._userClasses.has_key(elem):
            userClass = self._userClasses[elem]
            # create an instance
            value = userClass(value)
        self._values[elem] = value
        position += 1
python
[ "def", "_loadFromRow", "(", "self", ",", "result", ",", "fields", ",", "cursor", ")", ":", "position", "=", "0", "for", "elem", "in", "fields", ":", "value", "=", "result", "[", "position", "]", "valueType", "=", "cursor", ".", "description", "[", "position", "]", "[", "1", "]", "if", "hasattr", "(", "self", ".", "_dbModule", ",", "'BOOLEAN'", ")", "and", "valueType", "==", "self", ".", "_dbModule", ".", "BOOLEAN", "and", "(", "value", "is", "not", "True", "or", "value", "is", "not", "False", ")", ":", "# convert to a python boolean", "value", "=", "value", "and", "True", "or", "False", "if", "value", "and", "self", ".", "_userClasses", ".", "has_key", "(", "elem", ")", ":", "userClass", "=", "self", ".", "_userClasses", "[", "elem", "]", "# create an instance", "value", "=", "userClass", "(", "value", ")", "self", ".", "_values", "[", "elem", "]", "=", "value", "position", "+=", "1" ]
Load from a database row, described by fields. ``fields`` should be the attribute names that will be set. Note that userclasses will be created (but not loaded).
[ "Load", "from", "a", "database", "row", "described", "by", "fields", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L633-L655
251,315
stain/forgetSQL
lib/forgetSQL.py
Forgetter._loadDB
def _loadDB(self):
    """Connect to the database to load myself"""
    if not self._validID():
        raise NotFound, self._getID()
    (sql, fields) = self._prepareSQL("SELECT")
    curs = self.cursor()
    curs.execute(sql, self._getID())
    result = curs.fetchone()
    if not result:
        curs.close()
        raise NotFound, self._getID()
    self._loadFromRow(result, fields, curs)
    curs.close()
    self._updated = time.time()
python
[ "def", "_loadDB", "(", "self", ")", ":", "if", "not", "self", ".", "_validID", "(", ")", ":", "raise", "NotFound", ",", "self", ".", "_getID", "(", ")", "(", "sql", ",", "fields", ")", "=", "self", ".", "_prepareSQL", "(", "\"SELECT\"", ")", "curs", "=", "self", ".", "cursor", "(", ")", "curs", ".", "execute", "(", "sql", ",", "self", ".", "_getID", "(", ")", ")", "result", "=", "curs", ".", "fetchone", "(", ")", "if", "not", "result", ":", "curs", ".", "close", "(", ")", "raise", "NotFound", ",", "self", ".", "_getID", "(", ")", "self", ".", "_loadFromRow", "(", "result", ",", "fields", ",", "curs", ")", "curs", ".", "close", "(", ")", "self", ".", "_updated", "=", "time", ".", "time", "(", ")" ]
Connect to the database to load myself
[ "Connect", "to", "the", "database", "to", "load", "myself" ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L657-L670
251,316
stain/forgetSQL
lib/forgetSQL.py
Forgetter._saveDB
def _saveDB(self):
    """Insert or update into the database.

    Note that every field will be updated, not just the
    changed one.
    """
    # We're a "fresh" copy now
    self._updated = time.time()
    if self._new:
        operation = 'INSERT'
        if not self._validID():
            self._setID(self._nextSequence())
            # Note that we assign this ID to our self
            # BEFORE possibly saving any of our attribute
            # objects that might be new as well. This means
            # that they might have references to us, as long
            # as the database does not require our existence
            # yet.
            #
            # Since mysql does not have Sequences, this will
            # not work as smoothly there. See class
            # MysqlForgetter below.
    else:
        operation = 'UPDATE'
    (sql, fields) = self._prepareSQL(operation)
    values = []
    for field in fields:
        value = getattr(self, field)
        # First some dirty datatype hacks
        if DateTime and type(value) == DateTime.DateTimeType:
            # stupid psycopg does not support it's own return type..
            # lovely..
            value = str(value)
        if DateTime and type(value) == DateTime.DateTimeDeltaType:
            # Format delta as days, hours, minutes seconds
            # NOTE: includes value.second directly to get the
            # whole floating number
            value = value.strftime("%d %H:%M:") + str(value.second)
        if value is True or value is False:
            # We must store booleans as 't' and 'f' ...
            value = value and 't' or 'f'
        if isinstance(value, Forgetter):
            # It's another object, we store only the ID
            if value._new:
                # It's a new object too, it must be saved!
                value.save()
            try:
                (value,) = value._getID()
            except:
                raise "Unsupported: Can't reference multiple-primary-key: %s" % value
        values.append(value)
    cursor = self.cursor()
    cursor.execute(sql, values)
    # cursor.commit()
    cursor.close()
    self._new = False
    self._changed = None
python
[ "def", "_saveDB", "(", "self", ")", ":", "# We're a \"fresh\" copy now", "self", ".", "_updated", "=", "time", ".", "time", "(", ")", "if", "self", ".", "_new", ":", "operation", "=", "'INSERT'", "if", "not", "self", ".", "_validID", "(", ")", ":", "self", ".", "_setID", "(", "self", ".", "_nextSequence", "(", ")", ")", "# Note that we assign this ID to our self", "# BEFORE possibly saving any of our attribute", "# objects that might be new as well. This means", "# that they might have references to us, as long", "# as the database does not require our existence", "# yet.", "#", "# Since mysql does not have Sequences, this will", "# not work as smoothly there. See class", "# MysqlForgetter below.", "else", ":", "operation", "=", "'UPDATE'", "(", "sql", ",", "fields", ")", "=", "self", ".", "_prepareSQL", "(", "operation", ")", "values", "=", "[", "]", "for", "field", "in", "fields", ":", "value", "=", "getattr", "(", "self", ",", "field", ")", "# First some dirty datatype hacks", "if", "DateTime", "and", "type", "(", "value", ")", "==", "DateTime", ".", "DateTimeType", ":", "# stupid psycopg does not support it's own return type..", "# lovely..", "value", "=", "str", "(", "value", ")", "if", "DateTime", "and", "type", "(", "value", ")", "==", "DateTime", ".", "DateTimeDeltaType", ":", "# Format delta as days, hours, minutes seconds", "# NOTE: includes value.second directly to get the", "# whole floating number", "value", "=", "value", ".", "strftime", "(", "\"%d %H:%M:\"", ")", "+", "str", "(", "value", ".", "second", ")", "if", "value", "is", "True", "or", "value", "is", "False", ":", "# We must store booleans as 't' and 'f' ...", "value", "=", "value", "and", "'t'", "or", "'f'", "if", "isinstance", "(", "value", ",", "Forgetter", ")", ":", "# It's another object, we store only the ID", "if", "value", ".", "_new", ":", "# It's a new object too, it must be saved!", "value", ".", "save", "(", ")", "try", ":", "(", "value", ",", ")", "=", "value", ".", "_getID", "(", ")", "except", ":", "raise", "\"Unsupported: Can't reference multiple-primary-key: %s\"", "%", "value", "values", ".", "append", "(", "value", ")", "cursor", "=", "self", ".", "cursor", "(", ")", "cursor", ".", "execute", "(", "sql", ",", "values", ")", "# cursor.commit()", "cursor", ".", "close", "(", ")", "self", ".", "_new", "=", "False", "self", ".", "_changed", "=", "None" ]
Insert or update into the database. Note that every field will be updated, not just the changed one.
[ "Insert", "or", "update", "into", "the", "database", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L672-L728
251,317
stain/forgetSQL
lib/forgetSQL.py
Forgetter.getAll
def getAll(cls, where=None, orderBy=None):
    """Retrieve all the objects.

    If a list of ``where`` clauses are given, they will be AND-ed
    and will limit the search.

    This will not load everything out from the database, but will
    create a large amount of objects with only the ID inserted.
    The data will be loaded from the objects when needed by the
    regular load()-autocall.
    """
    ids = cls.getAllIDs(where, orderBy=orderBy)
    # Instansiate a lot of them
    if len(cls._sqlPrimary) > 1:
        return [cls(*id) for id in ids]
    else:
        return [cls(id) for id in ids]
python
[ "def", "getAll", "(", "cls", ",", "where", "=", "None", ",", "orderBy", "=", "None", ")", ":", "ids", "=", "cls", ".", "getAllIDs", "(", "where", ",", "orderBy", "=", "orderBy", ")", "# Instansiate a lot of them", "if", "len", "(", "cls", ".", "_sqlPrimary", ")", ">", "1", ":", "return", "[", "cls", "(", "*", "id", ")", "for", "id", "in", "ids", "]", "else", ":", "return", "[", "cls", "(", "id", ")", "for", "id", "in", "ids", "]" ]
Retrieve all the objects. If a list of ``where`` clauses are given, they will be AND-ed and will limit the search. This will not load everything out from the database, but will create a large amount of objects with only the ID inserted. The data will be loaded from the objects when needed by the regular load()-autocall.
[ "Retrieve", "all", "the", "objects", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L730-L746
251,318
stain/forgetSQL
lib/forgetSQL.py
Forgetter.getAllIterator
def getAllIterator(cls, where=None, buffer=100, useObject=None, orderBy=None):
    """Retrieve every object as an iterator.

    Possibly limitted by the where list of clauses that will be
    AND-ed. Since an iterator is returned, only ``buffer`` rows
    are loaded from the database at once.

    This is useful if you need to process all objects. If useObject
    is given, this object is returned each time, but with new data.
    This can be used to avoid creating many new objects when only
    one object is needed each time.
    """
    (sql, fields) = cls._prepareSQL("SELECTALL", where, orderBy=orderBy)
    curs = cls.cursor()
    fetchedAt = time.time()
    curs.execute(sql)  # We might start eating memory at this point
    def getNext(rows=[]):
        forgetter = cls
        if not rows:
            rows += curs.fetchmany(buffer)
        if not rows:
            curs.close()
            return None
        row = rows[0]
        del rows[0]
        try:
            idPositions = [fields.index(key) for key in cls._sqlPrimary]
        except ValueError:
            raise "Bad sqlPrimary, should be a list or tuple: %s" % cls._sqlPrimary
        ids = [row[pos] for pos in idPositions]
        if useObject:
            result = useObject
            result.reset()
            result._setID(ids)
        else:
            result = forgetter(*ids)
        result._loadFromRow(row, fields, curs)
        result._updated = fetchedAt
        return result
    return iter(getNext, None)
python
[ "def", "getAllIterator", "(", "cls", ",", "where", "=", "None", ",", "buffer", "=", "100", ",", "useObject", "=", "None", ",", "orderBy", "=", "None", ")", ":", "(", "sql", ",", "fields", ")", "=", "cls", ".", "_prepareSQL", "(", "\"SELECTALL\"", ",", "where", ",", "orderBy", "=", "orderBy", ")", "curs", "=", "cls", ".", "cursor", "(", ")", "fetchedAt", "=", "time", ".", "time", "(", ")", "curs", ".", "execute", "(", "sql", ")", "# We might start eating memory at this point", "def", "getNext", "(", "rows", "=", "[", "]", ")", ":", "forgetter", "=", "cls", "if", "not", "rows", ":", "rows", "+=", "curs", ".", "fetchmany", "(", "buffer", ")", "if", "not", "rows", ":", "curs", ".", "close", "(", ")", "return", "None", "row", "=", "rows", "[", "0", "]", "del", "rows", "[", "0", "]", "try", ":", "idPositions", "=", "[", "fields", ".", "index", "(", "key", ")", "for", "key", "in", "cls", ".", "_sqlPrimary", "]", "except", "ValueError", ":", "raise", "\"Bad sqlPrimary, should be a list or tuple: %s\"", "%", "cls", ".", "_sqlPrimary", "ids", "=", "[", "row", "[", "pos", "]", "for", "pos", "in", "idPositions", "]", "if", "useObject", ":", "result", "=", "useObject", "result", ".", "reset", "(", ")", "result", ".", "_setID", "(", "ids", ")", "else", ":", "result", "=", "forgetter", "(", "*", "ids", ")", "result", ".", "_loadFromRow", "(", "row", ",", "fields", ",", "curs", ")", "result", ".", "_updated", "=", "fetchedAt", "return", "result", "return", "iter", "(", "getNext", ",", "None", ")" ]
Retrieve every object as an iterator. Possibly limitted by the where list of clauses that will be AND-ed. Since an iterator is returned, only ``buffer`` rows are loaded from the database at once. This is useful if you need to process all objects. If useObject is given, this object is returned each time, but with new data. This can be used to avoid creating many new objects when only one object is needed each time.
[ "Retrieve", "every", "object", "as", "an", "iterator", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L751-L797
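getAllIterator leans on the two-argument form of iter(): iter(getNext, None) keeps calling getNext() until it returns the sentinel None, and the mutable default argument acts as the fetch buffer. A self-contained version of that buffering pattern over a plain iterable instead of a database cursor:

def buffered_iter(fetchmany, buffer_size=3):
    """Yield items one at a time, refilling from fetchmany() in chunks."""
    def get_next(rows=[]):              # the mutable default is the shared buffer
        if not rows:
            rows += fetchmany(buffer_size)
        if not rows:
            return None                 # sentinel value: iter() stops here
        return rows.pop(0)
    return iter(get_next, None)

source = iter(range(7))

def fetchmany(n):
    # Stand-in for cursor.fetchmany(): up to n items, empty list when exhausted.
    return [x for _, x in zip(range(n), source)]

print(list(buffered_iter(fetchmany)))   # [0, 1, 2, 3, 4, 5, 6]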
251,319
stain/forgetSQL
lib/forgetSQL.py
Forgetter.getAllIDs
def getAllIDs(cls, where=None, orderBy=None):
    """Retrive all the IDs, possibly matching the where clauses.

    Where should be some list of where clauses that will be joined
    with AND). Note that the result might be tuples if this table
    has a multivalue _sqlPrimary.
    """
    (sql, fields) = cls._prepareSQL("SELECTALL", where, cls._sqlPrimary, orderBy=orderBy)
    curs = cls.cursor()
    curs.execute(sql)  # We might start eating memory at this point
    rows = curs.fetchall()
    curs.close()
    result = []
    idPositions = [fields.index(key) for key in cls._sqlPrimary]
    for row in rows:
        ids = [row[pos] for pos in idPositions]
        if len(idPositions) > 1:
            ids = tuple(ids)
        else:
            ids = ids[0]
        result.append((ids))
    return result
python
[ "def", "getAllIDs", "(", "cls", ",", "where", "=", "None", ",", "orderBy", "=", "None", ")", ":", "(", "sql", ",", "fields", ")", "=", "cls", ".", "_prepareSQL", "(", "\"SELECTALL\"", ",", "where", ",", "cls", ".", "_sqlPrimary", ",", "orderBy", "=", "orderBy", ")", "curs", "=", "cls", ".", "cursor", "(", ")", "curs", ".", "execute", "(", "sql", ")", "# We might start eating memory at this point", "rows", "=", "curs", ".", "fetchall", "(", ")", "curs", ".", "close", "(", ")", "result", "=", "[", "]", "idPositions", "=", "[", "fields", ".", "index", "(", "key", ")", "for", "key", "in", "cls", ".", "_sqlPrimary", "]", "for", "row", "in", "rows", ":", "ids", "=", "[", "row", "[", "pos", "]", "for", "pos", "in", "idPositions", "]", "if", "len", "(", "idPositions", ")", ">", "1", ":", "ids", "=", "tuple", "(", "ids", ")", "else", ":", "ids", "=", "ids", "[", "0", "]", "result", ".", "append", "(", "(", "ids", ")", ")", "return", "result" ]
Retrive all the IDs, possibly matching the where clauses. Where should be some list of where clauses that will be joined with AND). Note that the result might be tuples if this table has a multivalue _sqlPrimary.
[ "Retrive", "all", "the", "IDs", "possibly", "matching", "the", "where", "clauses", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L801-L824
251,320
stain/forgetSQL
lib/forgetSQL.py
Forgetter.getAllText
def getAllText(cls, where=None, SEPERATOR=' ', orderBy=None):
    """Retrieve a list of of all possible instances of this class.

    The list is composed of tuples in the format (id, description) -
    where description is a string composed by the fields from
    cls._shortView, joint with SEPERATOR.
    """
    (sql, fields) = cls._prepareSQL("SELECTALL", where, orderBy=orderBy)
    curs = cls.cursor()
    curs.execute(sql)  # We might start eating memory at this point
    rows = curs.fetchall()
    curs.close()
    result = []
    idPositions = [fields.index(key) for key in cls._sqlPrimary]
    shortPos = [fields.index(short) for short in cls._shortView]
    for row in rows:
        ids = [row[pos] for pos in idPositions]
        if len(idPositions) > 1:
            ids = tuple(ids)
        else:
            ids = ids[0]
        text = SEPERATOR.join([str(row[pos]) for pos in shortPos])
        result.append((ids, text))
    return result
python
[ "def", "getAllText", "(", "cls", ",", "where", "=", "None", ",", "SEPERATOR", "=", "' '", ",", "orderBy", "=", "None", ")", ":", "(", "sql", ",", "fields", ")", "=", "cls", ".", "_prepareSQL", "(", "\"SELECTALL\"", ",", "where", ",", "orderBy", "=", "orderBy", ")", "curs", "=", "cls", ".", "cursor", "(", ")", "curs", ".", "execute", "(", "sql", ")", "# We might start eating memory at this point", "rows", "=", "curs", ".", "fetchall", "(", ")", "curs", ".", "close", "(", ")", "result", "=", "[", "]", "idPositions", "=", "[", "fields", ".", "index", "(", "key", ")", "for", "key", "in", "cls", ".", "_sqlPrimary", "]", "shortPos", "=", "[", "fields", ".", "index", "(", "short", ")", "for", "short", "in", "cls", ".", "_shortView", "]", "for", "row", "in", "rows", ":", "ids", "=", "[", "row", "[", "pos", "]", "for", "pos", "in", "idPositions", "]", "if", "len", "(", "idPositions", ")", ">", "1", ":", "ids", "=", "tuple", "(", "ids", ")", "else", ":", "ids", "=", "ids", "[", "0", "]", "text", "=", "SEPERATOR", ".", "join", "(", "[", "str", "(", "row", "[", "pos", "]", ")", "for", "pos", "in", "shortPos", "]", ")", "result", ".", "append", "(", "(", "ids", ",", "text", ")", ")", "return", "result" ]
Retrieve a list of of all possible instances of this class. The list is composed of tuples in the format (id, description) - where description is a string composed by the fields from cls._shortView, joint with SEPERATOR.
[ "Retrieve", "a", "list", "of", "of", "all", "possible", "instances", "of", "this", "class", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L828-L852
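The row-to-(id, text) packing in getAllText can be followed with plain lists in place of cursor rows; the field names and rows below are made up:

fields = ["person.id", "person.first", "person.last"]
sql_primary = ["person.id"]
short_view = ["person.first", "person.last"]
rows = [(1, "Ada", "Lovelace"), (2, "Alan", "Turing")]

SEPERATOR = ' '
id_pos = [fields.index(k) for k in sql_primary]
short_pos = [fields.index(s) for s in short_view]

result = []
for row in rows:
    ids = [row[p] for p in id_pos]
    ids = tuple(ids) if len(id_pos) > 1 else ids[0]
    text = SEPERATOR.join(str(row[p]) for p in short_pos)
    result.append((ids, text))

print(result)   # [(1, 'Ada Lovelace'), (2, 'Alan Turing')]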
251,321
stain/forgetSQL
lib/forgetSQL.py
Forgetter.getChildren
def getChildren(self, forgetter, field=None, where=None, orderBy=None):
    """Return the children that links to me.

    That means that I have to be listed in their _userClasses
    somehow. If field is specified, that field in my children is
    used as the pointer to me. Use this if you have multiple
    fields referring to my class.
    """
    if type(where) in (types.StringType, types.UnicodeType):
        where = (where,)
    if not field:
        for (i_field, i_class) in forgetter._userClasses.items():
            if isinstance(self, i_class):
                field = i_field
                break  # first one found is ok :=)
    if not field:
        raise "No field found, check forgetter's _userClasses"
    sqlname = forgetter._sqlFields[field]
    myID = self._getID()[0]  # assuming single-primary !
    whereList = ["%s='%s'" % (sqlname, myID)]
    if where:
        whereList.extend(where)
    return forgetter.getAll(whereList, orderBy=orderBy)
python
[ "def", "getChildren", "(", "self", ",", "forgetter", ",", "field", "=", "None", ",", "where", "=", "None", ",", "orderBy", "=", "None", ")", ":", "if", "type", "(", "where", ")", "in", "(", "types", ".", "StringType", ",", "types", ".", "UnicodeType", ")", ":", "where", "=", "(", "where", ",", ")", "if", "not", "field", ":", "for", "(", "i_field", ",", "i_class", ")", "in", "forgetter", ".", "_userClasses", ".", "items", "(", ")", ":", "if", "isinstance", "(", "self", ",", "i_class", ")", ":", "field", "=", "i_field", "break", "# first one found is ok :=)", "if", "not", "field", ":", "raise", "\"No field found, check forgetter's _userClasses\"", "sqlname", "=", "forgetter", ".", "_sqlFields", "[", "field", "]", "myID", "=", "self", ".", "_getID", "(", ")", "[", "0", "]", "# assuming single-primary !", "whereList", "=", "[", "\"%s='%s'\"", "%", "(", "sqlname", ",", "myID", ")", "]", "if", "where", ":", "whereList", ".", "extend", "(", "where", ")", "return", "forgetter", ".", "getAll", "(", "whereList", ",", "orderBy", "=", "orderBy", ")" ]
Return the children that links to me. That means that I have to be listed in their _userClasses somehow. If field is specified, that field in my children is used as the pointer to me. Use this if you have multiple fields referring to my class.
[ "Return", "the", "children", "that", "links", "to", "me", "." ]
2e13f983020b121fd75a95fcafce3ea75573fb6b
https://github.com/stain/forgetSQL/blob/2e13f983020b121fd75a95fcafce3ea75573fb6b/lib/forgetSQL.py#L856-L880
251,322
OpenVolunteeringPlatform/django-ovp-projects
ovp_projects/decorators.py
hide_address
def hide_address(func):
    """ Used to decorate Serializer.to_representation method.
        It hides the address field if the Project has
        'hidden_address' == True and the request user is neither
        owner or member of the organization
    """
    @wraps(func)
    def _impl(self, instance):
        # We pop address field to avoid AttributeError on default Serializer.to_representation
        if instance.hidden_address:
            for i, field in enumerate(self._readable_fields):
                if field.field_name == "address":
                    address = self._readable_fields.pop(i)
                    ret = func(self, instance)
                    self._readable_fields.insert(i, address)  # Put address back

                    request = self.context["request"]

                    # Check if user is organization member
                    is_organization_member = False
                    try:
                        if instance.organization is not None:
                            is_organization_member = (request.user in instance.organization.members.all())
                    except Organization.DoesNotExist:  # pragma: no cover
                        pass

                    # Add address representation
                    if request.user == instance.owner or is_organization_member:
                        ret["address"] = self.fields["address"].to_representation(instance.address)
                    else:
                        ret["address"] = None
        else:
            ret = func(self, instance)

        return ret
    return _impl
python
[ "def", "hide_address", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "_impl", "(", "self", ",", "instance", ")", ":", "# We pop address field to avoid AttributeError on default Serializer.to_representation", "if", "instance", ".", "hidden_address", ":", "for", "i", ",", "field", "in", "enumerate", "(", "self", ".", "_readable_fields", ")", ":", "if", "field", ".", "field_name", "==", "\"address\"", ":", "address", "=", "self", ".", "_readable_fields", ".", "pop", "(", "i", ")", "ret", "=", "func", "(", "self", ",", "instance", ")", "self", ".", "_readable_fields", ".", "insert", "(", "i", ",", "address", ")", "# Put address back", "request", "=", "self", ".", "context", "[", "\"request\"", "]", "# Check if user is organization member", "is_organization_member", "=", "False", "try", ":", "if", "instance", ".", "organization", "is", "not", "None", ":", "is_organization_member", "=", "(", "request", ".", "user", "in", "instance", ".", "organization", ".", "members", ".", "all", "(", ")", ")", "except", "Organization", ".", "DoesNotExist", ":", "# pragma: no cover", "pass", "# Add address representation", "if", "request", ".", "user", "==", "instance", ".", "owner", "or", "is_organization_member", ":", "ret", "[", "\"address\"", "]", "=", "self", ".", "fields", "[", "\"address\"", "]", ".", "to_representation", "(", "instance", ".", "address", ")", "else", ":", "ret", "[", "\"address\"", "]", "=", "None", "else", ":", "ret", "=", "func", "(", "self", ",", "instance", ")", "return", "ret", "return", "_impl" ]
Used to decorate Serializer.to_representation method. It hides the address field if the Project has 'hidden_address' == True and the request user is neither owner or member of the organization
[ "Used", "to", "decorate", "Serializer", ".", "to_representation", "method", ".", "It", "hides", "the", "address", "field", "if", "the", "Project", "has", "hidden_address", "==", "True", "and", "the", "request", "user", "is", "neither", "owner", "or", "member", "of", "the", "organization" ]
239e27027ca99c7b44ee4f30bf55d06439d49251
https://github.com/OpenVolunteeringPlatform/django-ovp-projects/blob/239e27027ca99c7b44ee4f30bf55d06439d49251/ovp_projects/decorators.py#L5-L40
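A minimal sketch of attaching hide_address to a DRF serializer; ProjectSerializer is an assumed name, and the decorator only works if the serializer is constructed with the request in its context (it reads self.context["request"])::

    from rest_framework import serializers
    from ovp_projects.decorators import hide_address

    class ProjectSerializer(serializers.ModelSerializer):   # assumed serializer
        @hide_address
        def to_representation(self, instance):
            return super(ProjectSerializer, self).to_representation(instance)

    # e.g. ProjectSerializer(project, context={'request': request}).data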
251,323
OpenVolunteeringPlatform/django-ovp-projects
ovp_projects/decorators.py
add_current_user_is_applied_representation
def add_current_user_is_applied_representation(func): """ Used to decorate Serializer.to_representation method. It sets the field "current_user_is_applied" if the user is applied to the project """ @wraps(func) def _impl(self, instance): # We pop current_user_is_applied field to avoid AttributeError on default Serializer.to_representation ret = func(self, instance) user = self.context["request"].user applied = False if not user.is_anonymous(): try: applied = models.Apply.objects.filter(user=user, project=instance).count() > 0 except: pass ret["current_user_is_applied"] = applied return ret return _impl
python
def add_current_user_is_applied_representation(func): """ Used to decorate Serializer.to_representation method. It sets the field "current_user_is_applied" if the user is applied to the project """ @wraps(func) def _impl(self, instance): # We pop current_user_is_applied field to avoid AttributeError on default Serializer.to_representation ret = func(self, instance) user = self.context["request"].user applied = False if not user.is_anonymous(): try: applied = models.Apply.objects.filter(user=user, project=instance).count() > 0 except: pass ret["current_user_is_applied"] = applied return ret return _impl
[ "def", "add_current_user_is_applied_representation", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "_impl", "(", "self", ",", "instance", ")", ":", "# We pop current_user_is_applied field to avoid AttributeError on default Serializer.to_representation", "ret", "=", "func", "(", "self", ",", "instance", ")", "user", "=", "self", ".", "context", "[", "\"request\"", "]", ".", "user", "applied", "=", "False", "if", "not", "user", ".", "is_anonymous", "(", ")", ":", "try", ":", "applied", "=", "models", ".", "Apply", ".", "objects", ".", "filter", "(", "user", "=", "user", ",", "project", "=", "instance", ")", ".", "count", "(", ")", ">", "0", "except", ":", "pass", "ret", "[", "\"current_user_is_applied\"", "]", "=", "applied", "return", "ret", "return", "_impl" ]
Used to decorate Serializer.to_representation method. It sets the field "current_user_is_applied" if the user is applied to the project
[ "Used", "to", "decorate", "Serializer", ".", "to_representation", "method", ".", "It", "sets", "the", "field", "current_user_is_applied", "if", "the", "user", "is", "applied", "to", "the", "project" ]
239e27027ca99c7b44ee4f30bf55d06439d49251
https://github.com/OpenVolunteeringPlatform/django-ovp-projects/blob/239e27027ca99c7b44ee4f30bf55d06439d49251/ovp_projects/decorators.py#L43-L63
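The same pattern works for add_current_user_is_applied_representation, and the two decorators can be stacked; the serializer name below is again an assumption, and both decorators require the request in the serializer context::

    from rest_framework import serializers
    from ovp_projects.decorators import (hide_address,
                                         add_current_user_is_applied_representation)

    class ProjectRetrieveSerializer(serializers.ModelSerializer):   # assumed name
        @add_current_user_is_applied_representation
        @hide_address
        def to_representation(self, instance):
            return super(ProjectRetrieveSerializer, self).to_representation(instance)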
251,324
klorenz/python-argdeco
argdeco/main.py
Main.configure
def configure(self, debug=None, quiet=None, verbosity=None, compile=None, compiler_factory=None, **kwargs): """configure managed args """ if debug is not None: self.arg_debug = debug if quiet is not None: self.arg_quiet = quiet if verbosity is not None: self.arg_verbosity = verbosity if compile is not None: self.compile = compile if compiler_factory is not None: self.compiler_factory = compiler_factory if kwargs: # other keyword arguments update command attribute self.command.update(**kwargs)
python
def configure(self, debug=None, quiet=None, verbosity=None, compile=None, compiler_factory=None, **kwargs): """configure managed args """ if debug is not None: self.arg_debug = debug if quiet is not None: self.arg_quiet = quiet if verbosity is not None: self.arg_verbosity = verbosity if compile is not None: self.compile = compile if compiler_factory is not None: self.compiler_factory = compiler_factory if kwargs: # other keyword arguments update command attribute self.command.update(**kwargs)
[ "def", "configure", "(", "self", ",", "debug", "=", "None", ",", "quiet", "=", "None", ",", "verbosity", "=", "None", ",", "compile", "=", "None", ",", "compiler_factory", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "debug", "is", "not", "None", ":", "self", ".", "arg_debug", "=", "debug", "if", "quiet", "is", "not", "None", ":", "self", ".", "arg_quiet", "=", "quiet", "if", "verbosity", "is", "not", "None", ":", "self", ".", "arg_verbosity", "=", "verbosity", "if", "compile", "is", "not", "None", ":", "self", ".", "compile", "=", "compile", "if", "compiler_factory", "is", "not", "None", ":", "self", ".", "compiler_factory", "=", "compiler_factory", "if", "kwargs", ":", "# other keyword arguments update command attribute", "self", ".", "command", ".", "update", "(", "*", "*", "kwargs", ")" ]
configure managed args
[ "configure", "managed", "args" ]
8d01acef8c19d6883873689d017b14857876412d
https://github.com/klorenz/python-argdeco/blob/8d01acef8c19d6883873689d017b14857876412d/argdeco/main.py#L212-L228
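A rough sketch of driving configure() from application code, before main() runs; which toggles make sense together depends on the program::

    from argdeco import main

    # toggle the managed --debug/--verbosity/--quiet behaviour
    main.configure(debug=True, verbosity=True, quiet=False)
    # any other keyword arguments are forwarded to main.command.update(...)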
251,325
klorenz/python-argdeco
argdeco/main.py
Main.uninstall_bash_completion
def uninstall_bash_completion(self, script_name=None, dest="~/.bashrc"): '''remove line to activate bash_completion for given script_name from given dest You can use this for letting the user uninstall bash_completion:: from argdeco import command, main @command("uninstall-bash-completion", arg('--dest', help="destination", default="~/.bashrc") ) def uninstall_bash_completion(dest): main.uninstall_bash_completion(dest=dest) ''' if 'USERPROFILE' in os.environ and 'HOME' not in os.environ: os.environ['HOME'] = os.environ['USERPROFILE'] dest = expanduser(dest) if script_name is None: script_name = sys.argv[0] lines = [] remove_line = 'register-python-argcomplete %s' % script_name with open(dest, 'r') as f: for line in f: if line.strip().startswith('#'): lines.append(line) continue if remove_line in line: continue lines.append(line) with open(dest, 'w') as f: f.write(''.join(lines))
python
def uninstall_bash_completion(self, script_name=None, dest="~/.bashrc"): '''remove line to activate bash_completion for given script_name from given dest You can use this for letting the user uninstall bash_completion:: from argdeco import command, main @command("uninstall-bash-completion", arg('--dest', help="destination", default="~/.bashrc") ) def uninstall_bash_completion(dest): main.uninstall_bash_completion(dest=dest) ''' if 'USERPROFILE' in os.environ and 'HOME' not in os.environ: os.environ['HOME'] = os.environ['USERPROFILE'] dest = expanduser(dest) if script_name is None: script_name = sys.argv[0] lines = [] remove_line = 'register-python-argcomplete %s' % script_name with open(dest, 'r') as f: for line in f: if line.strip().startswith('#'): lines.append(line) continue if remove_line in line: continue lines.append(line) with open(dest, 'w') as f: f.write(''.join(lines))
[ "def", "uninstall_bash_completion", "(", "self", ",", "script_name", "=", "None", ",", "dest", "=", "\"~/.bashrc\"", ")", ":", "if", "'USERPROFILE'", "in", "os", ".", "environ", "and", "'HOME'", "not", "in", "os", ".", "environ", ":", "os", ".", "environ", "[", "'HOME'", "]", "=", "os", ".", "environ", "[", "'USERPROFILE'", "]", "dest", "=", "expanduser", "(", "dest", ")", "if", "script_name", "is", "None", ":", "script_name", "=", "sys", ".", "argv", "[", "0", "]", "lines", "=", "[", "]", "remove_line", "=", "'register-python-argcomplete %s'", "%", "script_name", "with", "open", "(", "dest", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ":", "if", "line", ".", "strip", "(", ")", ".", "startswith", "(", "'#'", ")", ":", "lines", ".", "append", "(", "line", ")", "continue", "if", "remove_line", "in", "line", ":", "continue", "lines", ".", "append", "(", "line", ")", "with", "open", "(", "dest", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "''", ".", "join", "(", "lines", ")", ")" ]
remove line to activate bash_completion for given script_name from given dest You can use this for letting the user uninstall bash_completion:: from argdeco import command, main @command("uninstall-bash-completion", arg('--dest', help="destination", default="~/.bashrc") ) def uninstall_bash_completion(dest): main.uninstall_bash_completion(dest=dest)
[ "remove", "line", "to", "activate", "bash_completion", "for", "given", "script_name", "from", "given", "dest" ]
8d01acef8c19d6883873689d017b14857876412d
https://github.com/klorenz/python-argdeco/blob/8d01acef8c19d6883873689d017b14857876412d/argdeco/main.py#L289-L318
251,326
klorenz/python-argdeco
argdeco/main.py
Main.install_bash_completion
def install_bash_completion(self, script_name=None, dest="~/.bashrc"): '''add line to activate bash_completion for given script_name into dest You can use this for letting the user install bash_completion:: from argdeco import command, main @command("install-bash-completion", arg('--dest', help="destination", default="~/.bashrc") ) def install_bash_completion(dest): main.install_bash_completion(dest=dest) ''' if 'USERPROFILE' in os.environ and 'HOME' not in os.environ: os.environ['HOME'] = os.environ['USERPROFILE'] dest = expanduser(dest) if script_name is None: script_name = sys.argv[0] self.uninstall_bash_completion(script_name=script_name, dest=dest) with open(dest, 'a') as f: f.write('eval "$(register-python-argcomplete %s)"\n' % script_name)
python
def install_bash_completion(self, script_name=None, dest="~/.bashrc"): '''add line to activate bash_completion for given script_name into dest You can use this for letting the user install bash_completion:: from argdeco import command, main @command("install-bash-completion", arg('--dest', help="destination", default="~/.bashrc") ) def install_bash_completion(dest): main.install_bash_completion(dest=dest) ''' if 'USERPROFILE' in os.environ and 'HOME' not in os.environ: os.environ['HOME'] = os.environ['USERPROFILE'] dest = expanduser(dest) if script_name is None: script_name = sys.argv[0] self.uninstall_bash_completion(script_name=script_name, dest=dest) with open(dest, 'a') as f: f.write('eval "$(register-python-argcomplete %s)"\n' % script_name)
[ "def", "install_bash_completion", "(", "self", ",", "script_name", "=", "None", ",", "dest", "=", "\"~/.bashrc\"", ")", ":", "if", "'USERPROFILE'", "in", "os", ".", "environ", "and", "'HOME'", "not", "in", "os", ".", "environ", ":", "os", ".", "environ", "[", "'HOME'", "]", "=", "os", ".", "environ", "[", "'USERPROFILE'", "]", "dest", "=", "expanduser", "(", "dest", ")", "if", "script_name", "is", "None", ":", "script_name", "=", "sys", ".", "argv", "[", "0", "]", "self", ".", "uninstall_bash_completion", "(", "script_name", "=", "script_name", ",", "dest", "=", "dest", ")", "with", "open", "(", "dest", ",", "'a'", ")", "as", "f", ":", "f", ".", "write", "(", "'eval \"$(register-python-argcomplete %s)\"\\n'", "%", "script_name", ")" ]
add line to activate bash_completion for given script_name into dest You can use this for letting the user install bash_completion:: from argdeco import command, main @command("install-bash-completion", arg('--dest', help="destination", default="~/.bashrc") ) def install_bash_completion(dest): main.install_bash_completion(dest=dest)
[ "add", "line", "to", "activate", "bash_completion", "for", "given", "script_name", "into", "dest" ]
8d01acef8c19d6883873689d017b14857876412d
https://github.com/klorenz/python-argdeco/blob/8d01acef8c19d6883873689d017b14857876412d/argdeco/main.py#L320-L342
251,327
refinery29/chassis
chassis/util/params.py
_fetch_arguments
def _fetch_arguments(handler, method): """Get the arguments depending on the type of HTTP method.""" if method.__name__ == 'get': arguments = {} for key, value in six.iteritems(handler.request.arguments): # Tornado supports comma-separated lists of values in # parameters. We're undoing that here, and if a list # is expected the _validate method can handle it. if isinstance(value, list): arguments[key] = ','.join(value) else: arguments[key] = value else: # post, put, patch, delete? arguments = handler.get_post_arguments() return arguments
python
def _fetch_arguments(handler, method): """Get the arguments depending on the type of HTTP method.""" if method.__name__ == 'get': arguments = {} for key, value in six.iteritems(handler.request.arguments): # Tornado supports comma-separated lists of values in # parameters. We're undoing that here, and if a list # is expected the _validate method can handle it. if isinstance(value, list): arguments[key] = ','.join(value) else: arguments[key] = value else: # post, put, patch, delete? arguments = handler.get_post_arguments() return arguments
[ "def", "_fetch_arguments", "(", "handler", ",", "method", ")", ":", "if", "method", ".", "__name__", "==", "'get'", ":", "arguments", "=", "{", "}", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "handler", ".", "request", ".", "arguments", ")", ":", "# Tornado supports comma-separated lists of values in", "# parameters. We're undoing that here, and if a list", "# is expected the _validate method can handle it.", "if", "isinstance", "(", "value", ",", "list", ")", ":", "arguments", "[", "key", "]", "=", "','", ".", "join", "(", "value", ")", "else", ":", "arguments", "[", "key", "]", "=", "value", "else", ":", "# post, put, patch, delete?", "arguments", "=", "handler", ".", "get_post_arguments", "(", ")", "return", "arguments" ]
Get the arguments depending on the type of HTTP method.
[ "Get", "the", "arguments", "depending", "on", "the", "type", "of", "HTTP", "method", "." ]
1238d5214cbb8f3e1fe7c0dc2fa72f45bf085192
https://github.com/refinery29/chassis/blob/1238d5214cbb8f3e1fe7c0dc2fa72f45bf085192/chassis/util/params.py#L10-L26
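The GET branch is the only non-trivial part; the snippet below mirrors its list-flattening with a plain dict standing in for handler.request.arguments (no real Tornado handler involved)::

    raw = {'ids': ['1', '2', '3'], 'q': 'shoes'}   # stand-in for handler.request.arguments
    arguments = {}
    for key, value in raw.items():
        # Tornado hands back a list per key; collapse it to a comma-separated string
        arguments[key] = ','.join(value) if isinstance(value, list) else value
    assert arguments == {'ids': '1,2,3', 'q': 'shoes'}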
251,328
refinery29/chassis
chassis/util/params.py
_apply_validator_chain
def _apply_validator_chain(chain, value, handler): """Apply validators in sequence to a value.""" if hasattr(chain, 'validate'): # not a list chain = [chain, ] for validator in chain: if hasattr(validator, 'validate'): value = validator.validate(value, handler) else: raise web.HTTPError(500) return value
python
def _apply_validator_chain(chain, value, handler): """Apply validators in sequence to a value.""" if hasattr(chain, 'validate'): # not a list chain = [chain, ] for validator in chain: if hasattr(validator, 'validate'): value = validator.validate(value, handler) else: raise web.HTTPError(500) return value
[ "def", "_apply_validator_chain", "(", "chain", ",", "value", ",", "handler", ")", ":", "if", "hasattr", "(", "chain", ",", "'validate'", ")", ":", "# not a list", "chain", "=", "[", "chain", ",", "]", "for", "validator", "in", "chain", ":", "if", "hasattr", "(", "validator", ",", "'validate'", ")", ":", "value", "=", "validator", ".", "validate", "(", "value", ",", "handler", ")", "else", ":", "raise", "web", ".", "HTTPError", "(", "500", ")", "return", "value" ]
Apply validators in sequence to a value.
[ "Apply", "validators", "in", "sequence", "to", "a", "value", "." ]
1238d5214cbb8f3e1fe7c0dc2fa72f45bf085192
https://github.com/refinery29/chassis/blob/1238d5214cbb8f3e1fe7c0dc2fa72f45bf085192/chassis/util/params.py#L29-L40
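Anything exposing a validate(value, handler) method can sit in the chain; the two validators below are invented stand-ins (chassis ships its own, e.g. validators.Email seen in the parse() docstring further down) shown only to illustrate how the value is threaded through::

    class Strip(object):
        def validate(self, value, handler):
            return value.strip()

    class Lower(object):
        def validate(self, value, handler):
            return value.lower()

    cleaned = _apply_validator_chain([Strip(), Lower()], '  Foo@Example.COM  ', None)
    assert cleaned == 'foo@example.com'
    # a single validator (not wrapped in a list) is also accepted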
251,329
refinery29/chassis
chassis/util/params.py
_parse_arguments
def _parse_arguments(self, method, parameters): """Parse arguments to method, returning a dictionary.""" # TODO: Consider raising an exception if there are extra arguments. arguments = _fetch_arguments(self, method) arg_dict = {} errors = [] for key, properties in parameters: if key in arguments: value = arguments[key] try: arg_dict[key] = _apply_validator_chain( properties.get('validators', []), value, self) except validators.ValidationError as err: errors.append(err) else: if properties.get('required', False): raise web.HTTPError( 400, ('Missing required parameter: %s' % (key, )) ) else: if properties.get('default', None) is not None: arg_dict[key] = properties['default'] else: arg_dict[key] = None if errors: raise web.HTTPError(400, 'There were %s errors' % len(errors)) return arg_dict
python
def _parse_arguments(self, method, parameters): """Parse arguments to method, returning a dictionary.""" # TODO: Consider raising an exception if there are extra arguments. arguments = _fetch_arguments(self, method) arg_dict = {} errors = [] for key, properties in parameters: if key in arguments: value = arguments[key] try: arg_dict[key] = _apply_validator_chain( properties.get('validators', []), value, self) except validators.ValidationError as err: errors.append(err) else: if properties.get('required', False): raise web.HTTPError( 400, ('Missing required parameter: %s' % (key, )) ) else: if properties.get('default', None) is not None: arg_dict[key] = properties['default'] else: arg_dict[key] = None if errors: raise web.HTTPError(400, 'There were %s errors' % len(errors)) return arg_dict
[ "def", "_parse_arguments", "(", "self", ",", "method", ",", "parameters", ")", ":", "# TODO: Consider raising an exception if there are extra arguments.", "arguments", "=", "_fetch_arguments", "(", "self", ",", "method", ")", "arg_dict", "=", "{", "}", "errors", "=", "[", "]", "for", "key", ",", "properties", "in", "parameters", ":", "if", "key", "in", "arguments", ":", "value", "=", "arguments", "[", "key", "]", "try", ":", "arg_dict", "[", "key", "]", "=", "_apply_validator_chain", "(", "properties", ".", "get", "(", "'validators'", ",", "[", "]", ")", ",", "value", ",", "self", ")", "except", "validators", ".", "ValidationError", "as", "err", ":", "errors", ".", "append", "(", "err", ")", "else", ":", "if", "properties", ".", "get", "(", "'required'", ",", "False", ")", ":", "raise", "web", ".", "HTTPError", "(", "400", ",", "(", "'Missing required parameter: %s'", "%", "(", "key", ",", ")", ")", ")", "else", ":", "if", "properties", ".", "get", "(", "'default'", ",", "None", ")", "is", "not", "None", ":", "arg_dict", "[", "key", "]", "=", "properties", "[", "'default'", "]", "else", ":", "arg_dict", "[", "key", "]", "=", "None", "if", "errors", ":", "raise", "web", ".", "HTTPError", "(", "400", ",", "'There were %s errors'", "%", "len", "(", "errors", ")", ")", "return", "arg_dict" ]
Parse arguments to method, returning a dictionary.
[ "Parse", "arguments", "to", "method", "returning", "a", "dictionary", "." ]
1238d5214cbb8f3e1fe7c0dc2fa72f45bf085192
https://github.com/refinery29/chassis/blob/1238d5214cbb8f3e1fe7c0dc2fa72f45bf085192/chassis/util/params.py#L43-L75
251,330
refinery29/chassis
chassis/util/params.py
parse
def parse(parameters): """Decorator to parse parameters according to a set of criteria. This outer method is called to set up the decorator. Arguments: parameters: An array of parameter declarations tuples in the format: ('<param_name>', {'validate': [<ValidatorClass>,...], <options...>}) Usage: @chassis.util.parameters.parse([ ('email', {'validators': [validators.Email], 'required': True}), ('password', {'validators': [validators.Password], 'required': True}) ]) def post(self, email=None, password=None): # Render JSON for the provided parameters self.render_json({'email': email, 'password': password}) """ # pylint: disable=protected-access @decorators.include_original def decorate(method): """Setup returns this decorator, which is called on the method.""" def call(self, *args): """This is called whenever the decorated method is invoked.""" kwargs = _parse_arguments(self, method, parameters) return method(self, *args, **kwargs) # TODO: Autogenerate documentation data for parameters. return call return decorate
python
def parse(parameters): """Decorator to parse parameters according to a set of criteria. This outer method is called to set up the decorator. Arguments: parameters: An array of parameter declarations tuples in the format: ('<param_name>', {'validate': [<ValidatorClass>,...], <options...>}) Usage: @chassis.util.parameters.parse([ ('email', {'validators': [validators.Email], 'required': True}), ('password', {'validators': [validators.Password], 'required': True}) ]) def post(self, email=None, password=None): # Render JSON for the provided parameters self.render_json({'email': email, 'password': password}) """ # pylint: disable=protected-access @decorators.include_original def decorate(method): """Setup returns this decorator, which is called on the method.""" def call(self, *args): """This is called whenever the decorated method is invoked.""" kwargs = _parse_arguments(self, method, parameters) return method(self, *args, **kwargs) # TODO: Autogenerate documentation data for parameters. return call return decorate
[ "def", "parse", "(", "parameters", ")", ":", "# pylint: disable=protected-access", "@", "decorators", ".", "include_original", "def", "decorate", "(", "method", ")", ":", "\"\"\"Setup returns this decorator, which is called on the method.\"\"\"", "def", "call", "(", "self", ",", "*", "args", ")", ":", "\"\"\"This is called whenever the decorated method is invoked.\"\"\"", "kwargs", "=", "_parse_arguments", "(", "self", ",", "method", ",", "parameters", ")", "return", "method", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "# TODO: Autogenerate documentation data for parameters.", "return", "call", "return", "decorate" ]
Decorator to parse parameters according to a set of criteria. This outer method is called to set up the decorator. Arguments: parameters: An array of parameter declarations tuples in the format: ('<param_name>', {'validate': [<ValidatorClass>,...], <options...>}) Usage: @chassis.util.parameters.parse([ ('email', {'validators': [validators.Email], 'required': True}), ('password', {'validators': [validators.Password], 'required': True}) ]) def post(self, email=None, password=None): # Render JSON for the provided parameters self.render_json({'email': email, 'password': password})
[ "Decorator", "to", "parse", "parameters", "according", "to", "a", "set", "of", "criteria", "." ]
1238d5214cbb8f3e1fe7c0dc2fa72f45bf085192
https://github.com/refinery29/chassis/blob/1238d5214cbb8f3e1fe7c0dc2fa72f45bf085192/chassis/util/params.py#L78-L111
251,331
refinery29/chassis
chassis/util/params.py
parse_dict
def parse_dict(parameters): """Decorator to parse parameters as a dict according to a set of criteria. This outer method is called to set up the decorator. Arguments: parameters: An array of parameter declarations tuples in the format: ('<param_name>', {'validate': [<ValidatorClass>,...], <options...>}) Usage: @chassis.util.parameters.parse_dict([ ('email', {'validators': [validators.Email], 'required': True}), ('password', {'validators': [validators.Password], 'required': True}) ]) def post(self, data): # Render JSON for the provided parameters self.render_json({'email': data['email'], 'password': data['password']}) """ # pylint: disable=protected-access @decorators.include_original def decorate(method): """Setup returns this decorator, which is called on the method.""" def call(self, *args): """This is called whenever the decorated method is invoked.""" arg_dict = _parse_arguments(self, method, parameters) return method(self, *args, data=arg_dict) # TODO: Autogenerate documentation data for parameters. return call return decorate
python
def parse_dict(parameters): """Decorator to parse parameters as a dict according to a set of criteria. This outer method is called to set up the decorator. Arguments: parameters: An array of parameter declarations tuples in the format: ('<param_name>', {'validate': [<ValidatorClass>,...], <options...>}) Usage: @chassis.util.parameters.parse_dict([ ('email', {'validators': [validators.Email], 'required': True}), ('password', {'validators': [validators.Password], 'required': True}) ]) def post(self, data): # Render JSON for the provided parameters self.render_json({'email': data['email'], 'password': data['password']}) """ # pylint: disable=protected-access @decorators.include_original def decorate(method): """Setup returns this decorator, which is called on the method.""" def call(self, *args): """This is called whenever the decorated method is invoked.""" arg_dict = _parse_arguments(self, method, parameters) return method(self, *args, data=arg_dict) # TODO: Autogenerate documentation data for parameters. return call return decorate
[ "def", "parse_dict", "(", "parameters", ")", ":", "# pylint: disable=protected-access", "@", "decorators", ".", "include_original", "def", "decorate", "(", "method", ")", ":", "\"\"\"Setup returns this decorator, which is called on the method.\"\"\"", "def", "call", "(", "self", ",", "*", "args", ")", ":", "\"\"\"This is called whenever the decorated method is invoked.\"\"\"", "arg_dict", "=", "_parse_arguments", "(", "self", ",", "method", ",", "parameters", ")", "return", "method", "(", "self", ",", "*", "args", ",", "data", "=", "arg_dict", ")", "# TODO: Autogenerate documentation data for parameters.", "return", "call", "return", "decorate" ]
Decorator to parse parameters as a dict according to a set of criteria. This outer method is called to set up the decorator. Arguments: parameters: An array of parameter declarations tuples in the format: ('<param_name>', {'validate': [<ValidatorClass>,...], <options...>}) Usage: @chassis.util.parameters.parse_dict([ ('email', {'validators': [validators.Email], 'required': True}), ('password', {'validators': [validators.Password], 'required': True}) ]) def post(self, data): # Render JSON for the provided parameters self.render_json({'email': data['email'], 'password': data['password']})
[ "Decorator", "to", "parse", "parameters", "as", "a", "dict", "according", "to", "a", "set", "of", "criteria", "." ]
1238d5214cbb8f3e1fe7c0dc2fa72f45bf085192
https://github.com/refinery29/chassis/blob/1238d5214cbb8f3e1fe7c0dc2fa72f45bf085192/chassis/util/params.py#L114-L148
251,332
fstab50/metal
metal/logd.py
mode_assignment
def mode_assignment(arg): """ Translates arg to enforce proper assignment """ arg = arg.upper() stream_args = ('STREAM', 'CONSOLE', 'STDOUT') try: if arg in stream_args: return 'STREAM' else: return arg except Exception: return None
python
def mode_assignment(arg): """ Translates arg to enforce proper assignment """ arg = arg.upper() stream_args = ('STREAM', 'CONSOLE', 'STDOUT') try: if arg in stream_args: return 'STREAM' else: return arg except Exception: return None
[ "def", "mode_assignment", "(", "arg", ")", ":", "arg", "=", "arg", ".", "upper", "(", ")", "stream_args", "=", "(", "'STREAM'", ",", "'CONSOLE'", ",", "'STDOUT'", ")", "try", ":", "if", "arg", "in", "stream_args", ":", "return", "'STREAM'", "else", ":", "return", "arg", "except", "Exception", ":", "return", "None" ]
Translates arg to enforce proper assignment
[ "Translates", "arg", "to", "enforce", "proper", "assignment" ]
0488bbdd516a508909267cc44191f632e21156ba
https://github.com/fstab50/metal/blob/0488bbdd516a508909267cc44191f632e21156ba/metal/logd.py#L15-L27
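For illustration, the mapping behaves as below; note that non-string input would raise before the try block is reached, since .upper() is called first::

    assert mode_assignment('console') == 'STREAM'
    assert mode_assignment('stdout') == 'STREAM'
    assert mode_assignment('FILE') == 'FILE'       # unrecognised modes pass through upper-cased
    assert mode_assignment('syslog') == 'SYSLOG'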
251,333
boronine/discipline
discipline/models.py
Editor.save_object
def save_object(self, obj): """Save an object with Discipline Only argument is a Django object. This function saves the object (regardless of whether it already exists or not) and registers with Discipline, creating a new Action object. Do not use obj.save()! """ obj.save() try: save_object(obj, editor=self) except DisciplineException: pass
python
def save_object(self, obj): """Save an object with Discipline Only argument is a Django object. This function saves the object (regardless of whether it already exists or not) and registers with Discipline, creating a new Action object. Do not use obj.save()! """ obj.save() try: save_object(obj, editor=self) except DisciplineException: pass
[ "def", "save_object", "(", "self", ",", "obj", ")", ":", "obj", ".", "save", "(", ")", "try", ":", "save_object", "(", "obj", ",", "editor", "=", "self", ")", "except", "DisciplineException", ":", "pass" ]
Save an object with Discipline Only argument is a Django object. This function saves the object (regardless of whether it already exists or not) and registers with Discipline, creating a new Action object. Do not use obj.save()!
[ "Save", "an", "object", "with", "Discipline" ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L112-L123
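A usage sketch; Article is a hypothetical Django model, and how the acting Editor row is obtained (e.g. mapped from the request user) is application-specific::

    # `editor` is an existing discipline.models.Editor instance, resolved however
    # the application maps the acting user to an Editor (lookup is app-specific).
    article = Article(title='Hello world')   # hypothetical model instance
    editor.save_object(article)              # saves AND records the creation as an Action
    article.title = 'Hello again'
    editor.save_object(article)              # records the modification as another Action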
251,334
boronine/discipline
discipline/models.py
Editor.delete_object
def delete_object(self, obj, post_delete=False): """Delete an object with Discipline Only argument is a Django object. Analogous to Editor.save_object. """ # Collect related objects that will be deleted by cascading links = [rel.get_accessor_name() for rel in \ obj._meta.get_all_related_objects()] # Recursively delete each of them for link in links: objects = getattr(obj, link).all() for o in objects: self.delete_object(o, post_delete) # Delete the actual object self._delete_object(obj, post_delete)
python
def delete_object(self, obj, post_delete=False): """Delete an object with Discipline Only argument is a Django object. Analogous to Editor.save_object. """ # Collect related objects that will be deleted by cascading links = [rel.get_accessor_name() for rel in \ obj._meta.get_all_related_objects()] # Recursively delete each of them for link in links: objects = getattr(obj, link).all() for o in objects: self.delete_object(o, post_delete) # Delete the actual object self._delete_object(obj, post_delete)
[ "def", "delete_object", "(", "self", ",", "obj", ",", "post_delete", "=", "False", ")", ":", "# Collect related objects that will be deleted by cascading", "links", "=", "[", "rel", ".", "get_accessor_name", "(", ")", "for", "rel", "in", "obj", ".", "_meta", ".", "get_all_related_objects", "(", ")", "]", "# Recursively delete each of them", "for", "link", "in", "links", ":", "objects", "=", "getattr", "(", "obj", ",", "link", ")", ".", "all", "(", ")", "for", "o", "in", "objects", ":", "self", ".", "delete_object", "(", "o", ",", "post_delete", ")", "# Delete the actual object", "self", ".", "_delete_object", "(", "obj", ",", "post_delete", ")" ]
Delete an object with Discipline Only argument is a Django object. Analogous to Editor.save_object.
[ "Delete", "an", "object", "with", "Discipline" ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L125-L139
251,335
boronine/discipline
discipline/models.py
Action._description
def _description(self): """A concise html explanation of this Action.""" inst = self.timemachine.presently if self.action_type == "dl": return "Deleted %s" % inst.content_type.name elif self.action_type == "cr": return "Created %s" % inst._object_type_html() else: return "Modified %s" % inst._object_type_html()
python
def _description(self): """A concise html explanation of this Action.""" inst = self.timemachine.presently if self.action_type == "dl": return "Deleted %s" % inst.content_type.name elif self.action_type == "cr": return "Created %s" % inst._object_type_html() else: return "Modified %s" % inst._object_type_html()
[ "def", "_description", "(", "self", ")", ":", "inst", "=", "self", ".", "timemachine", ".", "presently", "if", "self", ".", "action_type", "==", "\"dl\"", ":", "return", "\"Deleted %s\"", "%", "inst", ".", "content_type", ".", "name", "elif", "self", ".", "action_type", "==", "\"cr\"", ":", "return", "\"Created %s\"", "%", "inst", ".", "_object_type_html", "(", ")", "else", ":", "return", "\"Modified %s\"", "%", "inst", ".", "_object_type_html", "(", ")" ]
A concise html explanation of this Action.
[ "A", "concise", "html", "explanation", "of", "this", "Action", "." ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L232-L242
251,336
boronine/discipline
discipline/models.py
Action.__get_timemachine
def __get_timemachine(self): """Return a TimeMachine for the object on which this action was performed and at the time of this action.""" if not self.__timemachine: self.__timemachine = TimeMachine( self.object_uid, step = self.id, ) return self.__timemachine.at(self.id)
python
def __get_timemachine(self): """Return a TimeMachine for the object on which this action was performed and at the time of this action.""" if not self.__timemachine: self.__timemachine = TimeMachine( self.object_uid, step = self.id, ) return self.__timemachine.at(self.id)
[ "def", "__get_timemachine", "(", "self", ")", ":", "if", "not", "self", ".", "__timemachine", ":", "self", ".", "__timemachine", "=", "TimeMachine", "(", "self", ".", "object_uid", ",", "step", "=", "self", ".", "id", ",", ")", "return", "self", ".", "__timemachine", ".", "at", "(", "self", ".", "id", ")" ]
Return a TimeMachine for the object on which this action was performed and at the time of this action.
[ "Return", "a", "TimeMachine", "for", "the", "object", "on", "which", "this", "action", "was", "performed", "and", "at", "the", "time", "of", "this", "action", "." ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L249-L258
251,337
boronine/discipline
discipline/models.py
Action.__get_is_revertible
def __get_is_revertible(self): """Return a boolean representing whether this Action is revertible or not""" # If it was already reverted if self.reverted: return False errors = [] inst = self.timemachine if inst.fields != inst.presently.fields or \ inst.foreignkeys != inst.presently.foreignkeys: self.__undo_errors = [ "Cannot undo action %s. The database schema" " for %s has changed" % (self.id, inst.content_type.name,)] return False if self.action_type in ["dl", "md"]: # If undoing deletion, make sure it actually doesn't exist if self.action_type == "dl" and inst.presently.exists: errors.append( "Cannot undo action %d: the %s you are trying to" " recreate already exists" % (self.id, inst.content_type.name,)) # The only problem we can have by reversing this action # is that some of its foreignkeys could be pointing to # objects that have since been deleted. check_here = inst.at_previous_action for field in inst.foreignkeys: fk = check_here.get_timemachine_instance(field) # If the ForeignKey doesn't have a value if not fk: continue if not fk.exists: errors.append( "Cannot undo action %s: the %s used to link to" " a %s that has since been deleted" % (self.id, inst.content_type.name, fk.content_type.name,)) else: # self.action_type == "cr" # Make sure it actually exists if not self.timemachine.presently.exists: errors.append( "Cannot undo action %s: the %s you are trying" " to delete doesn't currently exist" % (self.id, inst.content_type.name,)) # The only problem we can have by undoing this action is # that it could have foreignkeys pointed to it, so deleting # it will cause deletion of other objects else: links = [rel.get_accessor_name() for rel in \ inst.get_object()._meta.get_all_related_objects()] for link in links: objects = getattr(inst.get_object(), link).all() for rel in objects: errors.append( "Cannot undo action %s: you are trying to" " delete a %s that has a %s pointing to it" % (self.id, inst.content_type.name, ContentType.objects.get_for_model(rel.__class__),)) self.__undo_errors = errors return (len(errors) == 0)
python
def __get_is_revertible(self): """Return a boolean representing whether this Action is revertible or not""" # If it was already reverted if self.reverted: return False errors = [] inst = self.timemachine if inst.fields != inst.presently.fields or \ inst.foreignkeys != inst.presently.foreignkeys: self.__undo_errors = [ "Cannot undo action %s. The database schema" " for %s has changed" % (self.id, inst.content_type.name,)] return False if self.action_type in ["dl", "md"]: # If undoing deletion, make sure it actually doesn't exist if self.action_type == "dl" and inst.presently.exists: errors.append( "Cannot undo action %d: the %s you are trying to" " recreate already exists" % (self.id, inst.content_type.name,)) # The only problem we can have by reversing this action # is that some of its foreignkeys could be pointing to # objects that have since been deleted. check_here = inst.at_previous_action for field in inst.foreignkeys: fk = check_here.get_timemachine_instance(field) # If the ForeignKey doesn't have a value if not fk: continue if not fk.exists: errors.append( "Cannot undo action %s: the %s used to link to" " a %s that has since been deleted" % (self.id, inst.content_type.name, fk.content_type.name,)) else: # self.action_type == "cr" # Make sure it actually exists if not self.timemachine.presently.exists: errors.append( "Cannot undo action %s: the %s you are trying" " to delete doesn't currently exist" % (self.id, inst.content_type.name,)) # The only problem we can have by undoing this action is # that it could have foreignkeys pointed to it, so deleting # it will cause deletion of other objects else: links = [rel.get_accessor_name() for rel in \ inst.get_object()._meta.get_all_related_objects()] for link in links: objects = getattr(inst.get_object(), link).all() for rel in objects: errors.append( "Cannot undo action %s: you are trying to" " delete a %s that has a %s pointing to it" % (self.id, inst.content_type.name, ContentType.objects.get_for_model(rel.__class__),)) self.__undo_errors = errors return (len(errors) == 0)
[ "def", "__get_is_revertible", "(", "self", ")", ":", "# If it was already reverted", "if", "self", ".", "reverted", ":", "return", "False", "errors", "=", "[", "]", "inst", "=", "self", ".", "timemachine", "if", "inst", ".", "fields", "!=", "inst", ".", "presently", ".", "fields", "or", "inst", ".", "foreignkeys", "!=", "inst", ".", "presently", ".", "foreignkeys", ":", "self", ".", "__undo_errors", "=", "[", "\"Cannot undo action %s. The database schema\"", "\" for %s has changed\"", "%", "(", "self", ".", "id", ",", "inst", ".", "content_type", ".", "name", ",", ")", "]", "return", "False", "if", "self", ".", "action_type", "in", "[", "\"dl\"", ",", "\"md\"", "]", ":", "# If undoing deletion, make sure it actually doesn't exist", "if", "self", ".", "action_type", "==", "\"dl\"", "and", "inst", ".", "presently", ".", "exists", ":", "errors", ".", "append", "(", "\"Cannot undo action %d: the %s you are trying to\"", "\" recreate already exists\"", "%", "(", "self", ".", "id", ",", "inst", ".", "content_type", ".", "name", ",", ")", ")", "# The only problem we can have by reversing this action", "# is that some of its foreignkeys could be pointing to", "# objects that have since been deleted.", "check_here", "=", "inst", ".", "at_previous_action", "for", "field", "in", "inst", ".", "foreignkeys", ":", "fk", "=", "check_here", ".", "get_timemachine_instance", "(", "field", ")", "# If the ForeignKey doesn't have a value", "if", "not", "fk", ":", "continue", "if", "not", "fk", ".", "exists", ":", "errors", ".", "append", "(", "\"Cannot undo action %s: the %s used to link to\"", "\" a %s that has since been deleted\"", "%", "(", "self", ".", "id", ",", "inst", ".", "content_type", ".", "name", ",", "fk", ".", "content_type", ".", "name", ",", ")", ")", "else", ":", "# self.action_type == \"cr\"", "# Make sure it actually exists", "if", "not", "self", ".", "timemachine", ".", "presently", ".", "exists", ":", "errors", ".", "append", "(", "\"Cannot undo action %s: the %s you are trying\"", "\" to delete doesn't currently exist\"", "%", "(", "self", ".", "id", ",", "inst", ".", "content_type", ".", "name", ",", ")", ")", "# The only problem we can have by undoing this action is", "# that it could have foreignkeys pointed to it, so deleting", "# it will cause deletion of other objects", "else", ":", "links", "=", "[", "rel", ".", "get_accessor_name", "(", ")", "for", "rel", "in", "inst", ".", "get_object", "(", ")", ".", "_meta", ".", "get_all_related_objects", "(", ")", "]", "for", "link", "in", "links", ":", "objects", "=", "getattr", "(", "inst", ".", "get_object", "(", ")", ",", "link", ")", ".", "all", "(", ")", "for", "rel", "in", "objects", ":", "errors", ".", "append", "(", "\"Cannot undo action %s: you are trying to\"", "\" delete a %s that has a %s pointing to it\"", "%", "(", "self", ".", "id", ",", "inst", ".", "content_type", ".", "name", ",", "ContentType", ".", "objects", ".", "get_for_model", "(", "rel", ".", "__class__", ")", ",", ")", ")", "self", ".", "__undo_errors", "=", "errors", "return", "(", "len", "(", "errors", ")", "==", "0", ")" ]
Return a boolean representing whether this Action is revertible or not
[ "Return", "a", "boolean", "representing", "whether", "this", "Action", "is", "revertible", "or", "not" ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L262-L332
251,338
boronine/discipline
discipline/models.py
Action.undo
def undo(self, editor): """Create a new Action that undos the effects of this one, or, more accurately, reverts the object of this Action to the state at which it was right before the Action took place.""" inst = self.timemachine if not self.is_revertible: raise DisciplineException("You tried to undo a non-revertible action! " "Check action.is_revertible and action.undo_errors" " before trying to undo.") if self.action_type == "dl": obj = inst.restore() self.reverted = save_object(obj, editor) self.save() elif self.action_type == "md": # Restore as it was *before* the modification obj = inst.at_previous_action.restore() self.reverted = save_object(obj, editor) self.save() else: editor.delete_object(inst.get_object()) # This is safe from race conditions but still a pretty inelegant # solution. I can't figure out a different way to find the last action # for now self.reverted = DeletionCommit.objects.filter( object_uid = self.object_uid ).order_by("-action__id")[0].action self.save()
python
def undo(self, editor): """Create a new Action that undos the effects of this one, or, more accurately, reverts the object of this Action to the state at which it was right before the Action took place.""" inst = self.timemachine if not self.is_revertible: raise DisciplineException("You tried to undo a non-revertible action! " "Check action.is_revertible and action.undo_errors" " before trying to undo.") if self.action_type == "dl": obj = inst.restore() self.reverted = save_object(obj, editor) self.save() elif self.action_type == "md": # Restore as it was *before* the modification obj = inst.at_previous_action.restore() self.reverted = save_object(obj, editor) self.save() else: editor.delete_object(inst.get_object()) # This is safe from race conditions but still a pretty inelegant # solution. I can't figure out a different way to find the last action # for now self.reverted = DeletionCommit.objects.filter( object_uid = self.object_uid ).order_by("-action__id")[0].action self.save()
[ "def", "undo", "(", "self", ",", "editor", ")", ":", "inst", "=", "self", ".", "timemachine", "if", "not", "self", ".", "is_revertible", ":", "raise", "DisciplineException", "(", "\"You tried to undo a non-revertible action! \"", "\"Check action.is_revertible and action.undo_errors\"", "\" before trying to undo.\"", ")", "if", "self", ".", "action_type", "==", "\"dl\"", ":", "obj", "=", "inst", ".", "restore", "(", ")", "self", ".", "reverted", "=", "save_object", "(", "obj", ",", "editor", ")", "self", ".", "save", "(", ")", "elif", "self", ".", "action_type", "==", "\"md\"", ":", "# Restore as it was *before* the modification", "obj", "=", "inst", ".", "at_previous_action", ".", "restore", "(", ")", "self", ".", "reverted", "=", "save_object", "(", "obj", ",", "editor", ")", "self", ".", "save", "(", ")", "else", ":", "editor", ".", "delete_object", "(", "inst", ".", "get_object", "(", ")", ")", "# This is safe from race conditions but still a pretty inelegant", "# solution. I can't figure out a different way to find the last action", "# for now", "self", ".", "reverted", "=", "DeletionCommit", ".", "objects", ".", "filter", "(", "object_uid", "=", "self", ".", "object_uid", ")", ".", "order_by", "(", "\"-action__id\"", ")", "[", "0", "]", ".", "action", "self", ".", "save", "(", ")" ]
Create a new Action that undos the effects of this one, or, more accurately, reverts the object of this Action to the state at which it was right before the Action took place.
[ "Create", "a", "new", "Action", "that", "undos", "the", "effects", "of", "this", "one", "or", "more", "accurately", "reverts", "the", "object", "of", "this", "Action", "to", "the", "state", "at", "which", "it", "was", "right", "before", "the", "Action", "took", "place", "." ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L342-L369
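Reverting is a check-then-act affair, per the docstring's own warning; the sketch below reverts the most recent Action, with `editor` being whichever Editor performs the revert::

    action = Action.objects.order_by('-id')[0]
    if action.is_revertible:
        action.undo(editor)          # creates a new Action that reverts this one
    else:
        for message in action.undo_errors:
            print(message)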
251,339
boronine/discipline
discipline/models.py
Action._status
def _status(self): """Return html saying whether this Action is reverted by another one or reverts another one.""" text = "" # Turns out that is related field in null, Django # doesn't even make it a property of the object # http://code.djangoproject.com/ticket/11920 if hasattr(self, "reverts"): text += '(reverts <a href="%s">%s</a>)<br/>' % ( self.reverts.get_absolute_url(), self.reverts.id ) if self.reverted: text += '(reverted in <a href="%s">%s</a>)<br/>' % ( self.reverted.get_absolute_url(), self.reverted.id ) return text
python
def _status(self): """Return html saying whether this Action is reverted by another one or reverts another one.""" text = "" # Turns out that is related field in null, Django # doesn't even make it a property of the object # http://code.djangoproject.com/ticket/11920 if hasattr(self, "reverts"): text += '(reverts <a href="%s">%s</a>)<br/>' % ( self.reverts.get_absolute_url(), self.reverts.id ) if self.reverted: text += '(reverted in <a href="%s">%s</a>)<br/>' % ( self.reverted.get_absolute_url(), self.reverted.id ) return text
[ "def", "_status", "(", "self", ")", ":", "text", "=", "\"\"", "# Turns out that is related field in null, Django", "# doesn't even make it a property of the object", "# http://code.djangoproject.com/ticket/11920", "if", "hasattr", "(", "self", ",", "\"reverts\"", ")", ":", "text", "+=", "'(reverts <a href=\"%s\">%s</a>)<br/>'", "%", "(", "self", ".", "reverts", ".", "get_absolute_url", "(", ")", ",", "self", ".", "reverts", ".", "id", ")", "if", "self", ".", "reverted", ":", "text", "+=", "'(reverted in <a href=\"%s\">%s</a>)<br/>'", "%", "(", "self", ".", "reverted", ".", "get_absolute_url", "(", ")", ",", "self", ".", "reverted", ".", "id", ")", "return", "text" ]
Return html saying whether this Action is reverted by another one or reverts another one.
[ "Return", "html", "saying", "whether", "this", "Action", "is", "reverted", "by", "another", "one", "or", "reverts", "another", "one", "." ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L371-L388
251,340
boronine/discipline
discipline/models.py
Action.__summary
def __summary(self): """A plaintext summary of the Action, useful for debugging.""" text = "Time: %s\n" % self.when text += "Comitter: %s\n" % self.editor inst = self.timemachine.presently if self.action_type == "dl": text += "Deleted %s\n" % inst._object_type_text() elif self.action_type == "cr": text += "Created %s\n" % inst._object_type_text() else: text += "Modified %s\n" % inst._object_type_text() text += self._details(nohtml=True) return text
python
def __summary(self): """A plaintext summary of the Action, useful for debugging.""" text = "Time: %s\n" % self.when text += "Comitter: %s\n" % self.editor inst = self.timemachine.presently if self.action_type == "dl": text += "Deleted %s\n" % inst._object_type_text() elif self.action_type == "cr": text += "Created %s\n" % inst._object_type_text() else: text += "Modified %s\n" % inst._object_type_text() text += self._details(nohtml=True) return text
[ "def", "__summary", "(", "self", ")", ":", "text", "=", "\"Time: %s\\n\"", "%", "self", ".", "when", "text", "+=", "\"Comitter: %s\\n\"", "%", "self", ".", "editor", "inst", "=", "self", ".", "timemachine", ".", "presently", "if", "self", ".", "action_type", "==", "\"dl\"", ":", "text", "+=", "\"Deleted %s\\n\"", "%", "inst", ".", "_object_type_text", "(", ")", "elif", "self", ".", "action_type", "==", "\"cr\"", ":", "text", "+=", "\"Created %s\\n\"", "%", "inst", ".", "_object_type_text", "(", ")", "else", ":", "text", "+=", "\"Modified %s\\n\"", "%", "inst", ".", "_object_type_text", "(", ")", "text", "+=", "self", ".", "_details", "(", "nohtml", "=", "True", ")", "return", "text" ]
A plaintext summary of the Action, useful for debugging.
[ "A", "plaintext", "summary", "of", "the", "Action", "useful", "for", "debugging", "." ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L398-L412
251,341
boronine/discipline
discipline/models.py
Action._details
def _details(self, nohtml=False): """Return the html representation of the Action.""" text = "" inst = self.timemachine # If deleted or created, show every field, otherwise only # the modified if self.action_type in ("dl","cr",): fields = inst.fields + inst.foreignkeys else: fields = [i.key for i in self.modification_commits.all()] for field in fields: if not nohtml: text += "<strong>%s</strong>: " % field else: text += "%s: " % field # If modified, show what it was like one step earlier if self.action_type == "md": if not nohtml: text += "%s &#8594; " % \ inst.at_previous_action._field_value_html(field) else: text += "%s -> " % \ inst.at_previous_action._field_value_text(field) if not nohtml: text += "%s<br/>" % inst._field_value_html(field) else: text += "%s\n" % inst._field_value_text(field) return text
python
def _details(self, nohtml=False): """Return the html representation of the Action.""" text = "" inst = self.timemachine # If deleted or created, show every field, otherwise only # the modified if self.action_type in ("dl","cr",): fields = inst.fields + inst.foreignkeys else: fields = [i.key for i in self.modification_commits.all()] for field in fields: if not nohtml: text += "<strong>%s</strong>: " % field else: text += "%s: " % field # If modified, show what it was like one step earlier if self.action_type == "md": if not nohtml: text += "%s &#8594; " % \ inst.at_previous_action._field_value_html(field) else: text += "%s -> " % \ inst.at_previous_action._field_value_text(field) if not nohtml: text += "%s<br/>" % inst._field_value_html(field) else: text += "%s\n" % inst._field_value_text(field) return text
[ "def", "_details", "(", "self", ",", "nohtml", "=", "False", ")", ":", "text", "=", "\"\"", "inst", "=", "self", ".", "timemachine", "# If deleted or created, show every field, otherwise only", "# the modified", "if", "self", ".", "action_type", "in", "(", "\"dl\"", ",", "\"cr\"", ",", ")", ":", "fields", "=", "inst", ".", "fields", "+", "inst", ".", "foreignkeys", "else", ":", "fields", "=", "[", "i", ".", "key", "for", "i", "in", "self", ".", "modification_commits", ".", "all", "(", ")", "]", "for", "field", "in", "fields", ":", "if", "not", "nohtml", ":", "text", "+=", "\"<strong>%s</strong>: \"", "%", "field", "else", ":", "text", "+=", "\"%s: \"", "%", "field", "# If modified, show what it was like one step earlier", "if", "self", ".", "action_type", "==", "\"md\"", ":", "if", "not", "nohtml", ":", "text", "+=", "\"%s &#8594; \"", "%", "inst", ".", "at_previous_action", ".", "_field_value_html", "(", "field", ")", "else", ":", "text", "+=", "\"%s -> \"", "%", "inst", ".", "at_previous_action", ".", "_field_value_text", "(", "field", ")", "if", "not", "nohtml", ":", "text", "+=", "\"%s<br/>\"", "%", "inst", ".", "_field_value_html", "(", "field", ")", "else", ":", "text", "+=", "\"%s\\n\"", "%", "inst", ".", "_field_value_text", "(", "field", ")", "return", "text" ]
Return the html representation of the Action.
[ "Return", "the", "html", "representation", "of", "the", "Action", "." ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L416-L447
251,342
boronine/discipline
discipline/models.py
TimeMachine.__update_information
def __update_information(self): """Gether information that doesn't change at different points in time""" info = {} info["actions_count"] = Action.objects.count() info["creation_times"] = [] info["deletion_times"] = [] info["content_type"] = None # Find object type and when it was created for ccommit in CreationCommit.objects.filter(object_uid=self.uid): info["creation_times"].append(ccommit.action.id) info["creation_times"].sort() for dcommit in DeletionCommit.objects.filter(object_uid=self.uid): info["deletion_times"].append(dcommit.action.id) info["deletion_times"].sort() try: info["content_type"] = ccommit.content_type except NameError: raise DisciplineException("You tried to make a TimeMachine out of" " an object that doesn't exist!") self.info = info for key in info.keys(): setattr(self, key, info[key])
python
def __update_information(self): """Gether information that doesn't change at different points in time""" info = {} info["actions_count"] = Action.objects.count() info["creation_times"] = [] info["deletion_times"] = [] info["content_type"] = None # Find object type and when it was created for ccommit in CreationCommit.objects.filter(object_uid=self.uid): info["creation_times"].append(ccommit.action.id) info["creation_times"].sort() for dcommit in DeletionCommit.objects.filter(object_uid=self.uid): info["deletion_times"].append(dcommit.action.id) info["deletion_times"].sort() try: info["content_type"] = ccommit.content_type except NameError: raise DisciplineException("You tried to make a TimeMachine out of" " an object that doesn't exist!") self.info = info for key in info.keys(): setattr(self, key, info[key])
[ "def", "__update_information", "(", "self", ")", ":", "info", "=", "{", "}", "info", "[", "\"actions_count\"", "]", "=", "Action", ".", "objects", ".", "count", "(", ")", "info", "[", "\"creation_times\"", "]", "=", "[", "]", "info", "[", "\"deletion_times\"", "]", "=", "[", "]", "info", "[", "\"content_type\"", "]", "=", "None", "# Find object type and when it was created", "for", "ccommit", "in", "CreationCommit", ".", "objects", ".", "filter", "(", "object_uid", "=", "self", ".", "uid", ")", ":", "info", "[", "\"creation_times\"", "]", ".", "append", "(", "ccommit", ".", "action", ".", "id", ")", "info", "[", "\"creation_times\"", "]", ".", "sort", "(", ")", "for", "dcommit", "in", "DeletionCommit", ".", "objects", ".", "filter", "(", "object_uid", "=", "self", ".", "uid", ")", ":", "info", "[", "\"deletion_times\"", "]", ".", "append", "(", "dcommit", ".", "action", ".", "id", ")", "info", "[", "\"deletion_times\"", "]", ".", "sort", "(", ")", "try", ":", "info", "[", "\"content_type\"", "]", "=", "ccommit", ".", "content_type", "except", "NameError", ":", "raise", "DisciplineException", "(", "\"You tried to make a TimeMachine out of\"", "\" an object that doesn't exist!\"", ")", "self", ".", "info", "=", "info", "for", "key", "in", "info", ".", "keys", "(", ")", ":", "setattr", "(", "self", ",", "key", ",", "info", "[", "key", "]", ")" ]
Gather information that doesn't change at different points in time
[ "Gather", "information", "that", "doesn", "t", "change", "at", "different", "points", "in", "time" ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L563-L595
251,343
boronine/discipline
discipline/models.py
TimeMachine.at
def at(self, step): """Return a TimeMachine for the same object at a different time. Takes an integer argument representing the id field of an Action. Returns the TimeMachine at the time of that Action. (Less ambiguously: at the time right after the Action. """ return TimeMachine( self.uid, step = step, info = copy.deepcopy(self.info) )
python
def at(self, step): """Return a TimeMachine for the same object at a different time. Takes an integer argument representing the id field of an Action. Returns the TimeMachine at the time of that Action. (Less ambiguously: at the time right after the Action. """ return TimeMachine( self.uid, step = step, info = copy.deepcopy(self.info) )
[ "def", "at", "(", "self", ",", "step", ")", ":", "return", "TimeMachine", "(", "self", ".", "uid", ",", "step", "=", "step", ",", "info", "=", "copy", ".", "deepcopy", "(", "self", ".", "info", ")", ")" ]
Return a TimeMachine for the same object at a different time. Takes an integer argument representing the id field of an Action. Returns the TimeMachine at the time of that Action. (Less ambiguously: at the time right after the Action.)
[ "Return", "a", "TimeMachine", "for", "the", "same", "object", "at", "a", "different", "time", "." ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L597-L609
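A minimal usage sketch for the TimeMachine.at record above; the uid and step values below are made-up illustration values, not taken from the source.

tm = TimeMachine("some-object-uid")   # hypothetical uid; TimeMachine for the object's latest state
earlier = tm.at(5)                    # the same object as of the Action with id 5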
251,344
boronine/discipline
discipline/models.py
TimeMachine.get
def get(self, key): """Return the value of a field. Take a string argument representing a field name, return the value of that field at the time of this TimeMachine. When restoring a ForeignKey-pointer object that doesn't exist, raise DisciplineException """ modcommit = self._get_modcommit(key) if not modcommit: return None # If this isn't a ForeignKey, then just return the value if key not in self.foreignkeys: return cPickle.loads(str(modcommit.value)) # If it is, then return the object instance try: return TimeMachine(uid = modcommit.value).get_object() except self.content_type.DoesNotExist: raise DisciplineException("When restoring a ForeignKey, the " \ "%s %s was not found." % (self.content_type.name, self.uid))
python
def get(self, key): """Return the value of a field. Take a string argument representing a field name, return the value of that field at the time of this TimeMachine. When restoring a ForeignKey-pointer object that doesn't exist, raise DisciplineException """ modcommit = self._get_modcommit(key) if not modcommit: return None # If this isn't a ForeignKey, then just return the value if key not in self.foreignkeys: return cPickle.loads(str(modcommit.value)) # If it is, then return the object instance try: return TimeMachine(uid = modcommit.value).get_object() except self.content_type.DoesNotExist: raise DisciplineException("When restoring a ForeignKey, the " \ "%s %s was not found." % (self.content_type.name, self.uid))
[ "def", "get", "(", "self", ",", "key", ")", ":", "modcommit", "=", "self", ".", "_get_modcommit", "(", "key", ")", "if", "not", "modcommit", ":", "return", "None", "# If this isn't a ForeignKey, then just return the value", "if", "key", "not", "in", "self", ".", "foreignkeys", ":", "return", "cPickle", ".", "loads", "(", "str", "(", "modcommit", ".", "value", ")", ")", "# If it is, then return the object instance", "try", ":", "return", "TimeMachine", "(", "uid", "=", "modcommit", ".", "value", ")", ".", "get_object", "(", ")", "except", "self", ".", "content_type", ".", "DoesNotExist", ":", "raise", "DisciplineException", "(", "\"When restoring a ForeignKey, the \"", "\"%s %s was not found.\"", "%", "(", "self", ".", "content_type", ".", "name", ",", "self", ".", "uid", ")", ")" ]
Return the value of a field. Take a string argument representing a field name, return the value of that field at the time of this TimeMachine. When restoring a ForeignKey-pointer object that doesn't exist, raise DisciplineException
[ "Return", "the", "value", "of", "a", "field", ".", "Take", "a", "string", "argument", "representing", "a", "field", "name", "return", "the", "value", "of", "that", "field", "at", "the", "time", "of", "this", "TimeMachine", ".", "When", "restoring", "a", "ForeignKey", "-", "pointer", "object", "that", "doesn", "t", "exist", "raise", "DisciplineException" ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L635-L654
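A minimal sketch of reading a historical field value with TimeMachine.get; the uid, step, and field name are assumptions for illustration only.

tm = TimeMachine("some-object-uid").at(5)   # hypothetical uid and step
old_title = tm.get("title")                 # "title" is an assumed field name on the tracked model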
251,345
boronine/discipline
discipline/models.py
TimeMachine.get_timemachine_instance
def get_timemachine_instance(self, key): """Return a TimeMachine for a related object. Take a string argument representing a ForeignKey field name, find what object was related to this one at the time of this TimeMachine and return a TimeMachine for that related object. """ modcommit = self._get_modcommit(key) if not modcommit: return None return TimeMachine(uid = modcommit.value)
python
def get_timemachine_instance(self, key): """Return a TimeMachine for a related object. Take a string argument representing a ForeignKey field name, find what object was related to this one at the time of this TimeMachine and return a TimeMachine for that related object. """ modcommit = self._get_modcommit(key) if not modcommit: return None return TimeMachine(uid = modcommit.value)
[ "def", "get_timemachine_instance", "(", "self", ",", "key", ")", ":", "modcommit", "=", "self", ".", "_get_modcommit", "(", "key", ")", "if", "not", "modcommit", ":", "return", "None", "return", "TimeMachine", "(", "uid", "=", "modcommit", ".", "value", ")" ]
Return a TimeMachine for a related object. Take a string argument representing a ForeignKey field name, find what object was related to this one at the time of this TimeMachine and return a TimeMachine for that related object.
[ "Return", "a", "TimeMachine", "for", "a", "related", "object", "." ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L656-L667
251,346
boronine/discipline
discipline/models.py
TimeMachine.get_object
def get_object(self): """Return the object of this TimeMachine""" return self.content_type.model_class().objects.get(uid = self.uid)
python
def get_object(self): """Return the object of this TimeMachine""" return self.content_type.model_class().objects.get(uid = self.uid)
[ "def", "get_object", "(", "self", ")", ":", "return", "self", ".", "content_type", ".", "model_class", "(", ")", ".", "objects", ".", "get", "(", "uid", "=", "self", ".", "uid", ")" ]
Return the object of this TimeMachine
[ "Return", "the", "object", "of", "this", "TimeMachine" ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L669-L671
251,347
boronine/discipline
discipline/models.py
TimeMachine.restore
def restore(self, nosave=False): """Restore all of the object attributes to the attributes. Return the Django object. """ if self.exists: obj = self.content_type.model_class().objects.get(uid=self.uid) else: obj = self.content_type.model_class()(uid=self.uid) for field in self.fields + self.foreignkeys: obj.__setattr__(field, self.get(field)) if not nosave: obj.save() return obj
python
def restore(self, nosave=False): """Restore all of the object attributes to the attributes. Return the Django object. """ if self.exists: obj = self.content_type.model_class().objects.get(uid=self.uid) else: obj = self.content_type.model_class()(uid=self.uid) for field in self.fields + self.foreignkeys: obj.__setattr__(field, self.get(field)) if not nosave: obj.save() return obj
[ "def", "restore", "(", "self", ",", "nosave", "=", "False", ")", ":", "if", "self", ".", "exists", ":", "obj", "=", "self", ".", "content_type", ".", "model_class", "(", ")", ".", "objects", ".", "get", "(", "uid", "=", "self", ".", "uid", ")", "else", ":", "obj", "=", "self", ".", "content_type", ".", "model_class", "(", ")", "(", "uid", "=", "self", ".", "uid", ")", "for", "field", "in", "self", ".", "fields", "+", "self", ".", "foreignkeys", ":", "obj", ".", "__setattr__", "(", "field", ",", "self", ".", "get", "(", "field", ")", ")", "if", "not", "nosave", ":", "obj", ".", "save", "(", ")", "return", "obj" ]
Restore all of the object's attributes to their values at this point in time. Return the Django object.
[ "Restore", "all", "of", "the", "object", "s", "attributes", "to", "their", "values", "at", "this", "point", "in", "time", ".", "Return", "the", "Django", "object", "." ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L711-L722
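A minimal sketch of rolling an object back with TimeMachine.restore, assuming a hypothetical uid and step.

obj = TimeMachine("some-object-uid").at(5).restore()                # restores the object and saves it
draft = TimeMachine("some-object-uid").at(5).restore(nosave=True)   # restored instance without saving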
251,348
boronine/discipline
discipline/models.py
TimeMachine.url
def url(self): """Return the admin url of the object.""" return urlresolvers.reverse( "admin:%s_%s_change" % (self.content_type.app_label, self.content_type.model), args = (self.get_object().uid,))
python
def url(self): """Return the admin url of the object.""" return urlresolvers.reverse( "admin:%s_%s_change" % (self.content_type.app_label, self.content_type.model), args = (self.get_object().uid,))
[ "def", "url", "(", "self", ")", ":", "return", "urlresolvers", ".", "reverse", "(", "\"admin:%s_%s_change\"", "%", "(", "self", ".", "content_type", ".", "app_label", ",", "self", ".", "content_type", ".", "model", ")", ",", "args", "=", "(", "self", ".", "get_object", "(", ")", ".", "uid", ",", ")", ")" ]
Return the admin url of the object.
[ "Return", "the", "admin", "url", "of", "the", "object", "." ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L727-L732
251,349
boronine/discipline
discipline/models.py
TimeMachine._object_type_html
def _object_type_html(self): """Return an html admin link with the object's type as text. If the object doesn't exist, return the object's type crossed out. """ if self.exists: return "<a href=\"%s\">%s</a>" % (self.url(), self.content_type.name,) else: return "<s>%s</s>" % self.content_type.name
python
def _object_type_html(self): """Return an html admin link with the object's type as text. If the object doesn't exist, return the object's type crossed out. """ if self.exists: return "<a href=\"%s\">%s</a>" % (self.url(), self.content_type.name,) else: return "<s>%s</s>" % self.content_type.name
[ "def", "_object_type_html", "(", "self", ")", ":", "if", "self", ".", "exists", ":", "return", "\"<a href=\\\"%s\\\">%s</a>\"", "%", "(", "self", ".", "url", "(", ")", ",", "self", ".", "content_type", ".", "name", ",", ")", "else", ":", "return", "\"<s>%s</s>\"", "%", "self", ".", "content_type", ".", "name" ]
Return an html admin link with the object's type as text. If the object doesn't exist, return the object's type crossed out.
[ "Return", "an", "html", "admin", "link", "with", "the", "object", "s", "type", "as", "text", ".", "If", "the", "object", "doesn", "t", "exist", "return", "the", "object", "s", "type", "crossed", "out", "." ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L735-L744
251,350
boronine/discipline
discipline/models.py
SchemaState.get_for_content_type
def get_for_content_type(self, ct): """Return the schema for the model of the given ContentType object""" try: return json.loads(self.state)[ct.app_label][ct.model] except KeyError: return None
python
def get_for_content_type(self, ct): """Return the schema for the model of the given ContentType object""" try: return json.loads(self.state)[ct.app_label][ct.model] except KeyError: return None
[ "def", "get_for_content_type", "(", "self", ",", "ct", ")", ":", "try", ":", "return", "json", ".", "loads", "(", "self", ".", "state", ")", "[", "ct", ".", "app_label", "]", "[", "ct", ".", "model", "]", "except", "KeyError", ":", "return", "None" ]
Return the schema for the model of the given ContentType object
[ "Return", "the", "schema", "for", "the", "model", "of", "the", "given", "ContentType", "object" ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L798-L803
251,351
boronine/discipline
discipline/models.py
SchemaState.html_state
def html_state(self): """Display state in HTML format for the admin form.""" ret = "" state = json.loads(self.state) for (app, appstate) in state.items(): for (model, modelstate) in appstate.items(): ret += "<p>%s.models.%s</p>" % (app, model,) ret += "<ul>" for field in modelstate["fields"] + ["uid"]: ret += "<li>%s</li>" % field for fk in modelstate["foreignkeys"]: ret += "<li>%s (foreign key)</li>" % fk ret += "</ul>" return ret
python
def html_state(self): """Display state in HTML format for the admin form.""" ret = "" state = json.loads(self.state) for (app, appstate) in state.items(): for (model, modelstate) in appstate.items(): ret += "<p>%s.models.%s</p>" % (app, model,) ret += "<ul>" for field in modelstate["fields"] + ["uid"]: ret += "<li>%s</li>" % field for fk in modelstate["foreignkeys"]: ret += "<li>%s (foreign key)</li>" % fk ret += "</ul>" return ret
[ "def", "html_state", "(", "self", ")", ":", "ret", "=", "\"\"", "state", "=", "json", ".", "loads", "(", "self", ".", "state", ")", "for", "(", "app", ",", "appstate", ")", "in", "state", ".", "items", "(", ")", ":", "for", "(", "model", ",", "modelstate", ")", "in", "appstate", ".", "items", "(", ")", ":", "ret", "+=", "\"<p>%s.models.%s</p>\"", "%", "(", "app", ",", "model", ",", ")", "ret", "+=", "\"<ul>\"", "for", "field", "in", "modelstate", "[", "\"fields\"", "]", "+", "[", "\"uid\"", "]", ":", "ret", "+=", "\"<li>%s</li>\"", "%", "field", "for", "fk", "in", "modelstate", "[", "\"foreignkeys\"", "]", ":", "ret", "+=", "\"<li>%s (foreign key)</li>\"", "%", "fk", "ret", "+=", "\"</ul>\"", "return", "ret" ]
Display state in HTML format for the admin form.
[ "Display", "state", "in", "HTML", "format", "for", "the", "admin", "form", "." ]
68bea9bc2198cc91cee49a6e2d0f3333cc9bf476
https://github.com/boronine/discipline/blob/68bea9bc2198cc91cee49a6e2d0f3333cc9bf476/discipline/models.py#L808-L821
251,352
ryanjdillon/pyotelem
pyotelem/plots/plotdynamics.py
plot_prh_des_asc
def plot_prh_des_asc(p, r, h, asc, des): '''Plot pitch, roll, and heading during the descent and ascent dive phases Args ---- p: ndarray Derived pitch data r: ndarray Derived roll data h: ndarray Derived heading data des: ndarray boolean mask for slicing descent phases of dives from tag dta asc: ndarray boolean mask for slicing asccent phases of dives from tag dta ''' import matplotlib.pyplot as plt import numpy from . import plotutils # Convert boolean mask to indices des_ind = numpy.where(des)[0] asc_ind = numpy.where(asc)[0] fig, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex='col') ax1.title.set_text('Pitch') ax1 = plotutils.plot_noncontiguous(ax1, p, des_ind, _colors[0], 'descents') ax1 = plotutils.plot_noncontiguous(ax1, p, asc_ind, _colors[1], 'ascents') ax1.title.set_text('Roll') ax2 = plotutils.plot_noncontiguous(ax2, r, des_ind, _colors[0], 'descents') ax2 = plotutils.plot_noncontiguous(ax2, r, asc_ind, _colors[1], 'ascents') ax1.title.set_text('Heading') ax3 = plotutils.plot_noncontiguous(ax3, h, des_ind, _colors[0], 'descents') ax3 = plotutils.plot_noncontiguous(ax3, h, asc_ind, _colors[1], 'ascents') for ax in [ax1, ax2, ax3]: ax.legend(loc="upper right") plt.ylabel('Radians') plt.xlabel('Samples') plt.show() return None
python
def plot_prh_des_asc(p, r, h, asc, des): '''Plot pitch, roll, and heading during the descent and ascent dive phases Args ---- p: ndarray Derived pitch data r: ndarray Derived roll data h: ndarray Derived heading data des: ndarray boolean mask for slicing descent phases of dives from tag dta asc: ndarray boolean mask for slicing asccent phases of dives from tag dta ''' import matplotlib.pyplot as plt import numpy from . import plotutils # Convert boolean mask to indices des_ind = numpy.where(des)[0] asc_ind = numpy.where(asc)[0] fig, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex='col') ax1.title.set_text('Pitch') ax1 = plotutils.plot_noncontiguous(ax1, p, des_ind, _colors[0], 'descents') ax1 = plotutils.plot_noncontiguous(ax1, p, asc_ind, _colors[1], 'ascents') ax1.title.set_text('Roll') ax2 = plotutils.plot_noncontiguous(ax2, r, des_ind, _colors[0], 'descents') ax2 = plotutils.plot_noncontiguous(ax2, r, asc_ind, _colors[1], 'ascents') ax1.title.set_text('Heading') ax3 = plotutils.plot_noncontiguous(ax3, h, des_ind, _colors[0], 'descents') ax3 = plotutils.plot_noncontiguous(ax3, h, asc_ind, _colors[1], 'ascents') for ax in [ax1, ax2, ax3]: ax.legend(loc="upper right") plt.ylabel('Radians') plt.xlabel('Samples') plt.show() return None
[ "def", "plot_prh_des_asc", "(", "p", ",", "r", ",", "h", ",", "asc", ",", "des", ")", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "import", "numpy", "from", ".", "import", "plotutils", "# Convert boolean mask to indices", "des_ind", "=", "numpy", ".", "where", "(", "des", ")", "[", "0", "]", "asc_ind", "=", "numpy", ".", "where", "(", "asc", ")", "[", "0", "]", "fig", ",", "(", "ax1", ",", "ax2", ",", "ax3", ")", "=", "plt", ".", "subplots", "(", "3", ",", "1", ",", "sharex", "=", "'col'", ")", "ax1", ".", "title", ".", "set_text", "(", "'Pitch'", ")", "ax1", "=", "plotutils", ".", "plot_noncontiguous", "(", "ax1", ",", "p", ",", "des_ind", ",", "_colors", "[", "0", "]", ",", "'descents'", ")", "ax1", "=", "plotutils", ".", "plot_noncontiguous", "(", "ax1", ",", "p", ",", "asc_ind", ",", "_colors", "[", "1", "]", ",", "'ascents'", ")", "ax1", ".", "title", ".", "set_text", "(", "'Roll'", ")", "ax2", "=", "plotutils", ".", "plot_noncontiguous", "(", "ax2", ",", "r", ",", "des_ind", ",", "_colors", "[", "0", "]", ",", "'descents'", ")", "ax2", "=", "plotutils", ".", "plot_noncontiguous", "(", "ax2", ",", "r", ",", "asc_ind", ",", "_colors", "[", "1", "]", ",", "'ascents'", ")", "ax1", ".", "title", ".", "set_text", "(", "'Heading'", ")", "ax3", "=", "plotutils", ".", "plot_noncontiguous", "(", "ax3", ",", "h", ",", "des_ind", ",", "_colors", "[", "0", "]", ",", "'descents'", ")", "ax3", "=", "plotutils", ".", "plot_noncontiguous", "(", "ax3", ",", "h", ",", "asc_ind", ",", "_colors", "[", "1", "]", ",", "'ascents'", ")", "for", "ax", "in", "[", "ax1", ",", "ax2", ",", "ax3", "]", ":", "ax", ".", "legend", "(", "loc", "=", "\"upper right\"", ")", "plt", ".", "ylabel", "(", "'Radians'", ")", "plt", ".", "xlabel", "(", "'Samples'", ")", "plt", ".", "show", "(", ")", "return", "None" ]
Plot pitch, roll, and heading during the descent and ascent dive phases Args ---- p: ndarray Derived pitch data r: ndarray Derived roll data h: ndarray Derived heading data des: ndarray boolean mask for slicing descent phases of dives from tag data asc: ndarray boolean mask for slicing ascent phases of dives from tag data
[ "Plot", "pitch", "roll", "and", "heading", "during", "the", "descent", "and", "ascent", "dive", "phases" ]
816563a9c3feb3fa416f1c2921c6b75db34111ad
https://github.com/ryanjdillon/pyotelem/blob/816563a9c3feb3fa416f1c2921c6b75db34111ad/pyotelem/plots/plotdynamics.py#L11-L58
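A minimal sketch of calling plot_prh_des_asc with synthetic data; the arrays and masks below are made up only to show the expected shapes and types.

import numpy
from pyotelem.plots import plotdynamics

n = 1000
p = numpy.random.uniform(-1.0, 1.0, n)        # pitch (radians), synthetic
r = numpy.random.uniform(-1.0, 1.0, n)        # roll (radians), synthetic
h = numpy.random.uniform(-3.0, 3.0, n)        # heading (radians), synthetic
des = numpy.zeros(n, dtype=bool); des[100:300] = True   # descent phase mask
asc = numpy.zeros(n, dtype=bool); asc[300:500] = True   # ascent phase mask

plotdynamics.plot_prh_des_asc(p, r, h, asc, des)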
251,353
ryanjdillon/pyotelem
pyotelem/plots/plotdynamics.py
plot_prh_filtered
def plot_prh_filtered(p, r, h, p_lf, r_lf, h_lf): '''Plot original and low-pass filtered PRH data Args ---- p: ndarray Derived pitch data r: ndarray Derived roll data h: ndarray Derived heading data p_lf: ndarray Low-pass filtered pitch data r_lf: ndarray Low-pass filtered roll data h_lf: ndarray Low-pass filtered heading data ''' import numpy fig, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex='col') #rad2deg = lambda x: x*180/numpy.pi ax1.title.set_text('Pitch') ax1.plot(range(len(p)), p, color=_colors[0], linewidth=_linewidth, label='original') ax1.plot(range(len(p_lf)), p_lf, color=_colors[1], linewidth=_linewidth, label='filtered') ax2.title.set_text('Roll') ax2.plot(range(len(r)), r, color=_colors[2], linewidth=_linewidth, label='original') ax2.plot(range(len(r_lf)), r_lf, color=_colors[3], linewidth=_linewidth, label='filtered') ax3.title.set_text('Heading') ax3.plot(range(len(h)), h, color=_colors[4], linewidth=_linewidth, label='original') ax3.plot(range(len(h_lf)), h_lf, color=_colors[5], linewidth=_linewidth, label='filtered') plt.ylabel('Radians') plt.xlabel('Samples') for ax in [ax1, ax2, ax3]: ax.legend(loc="upper right") plt.show() return None
python
def plot_prh_filtered(p, r, h, p_lf, r_lf, h_lf): '''Plot original and low-pass filtered PRH data Args ---- p: ndarray Derived pitch data r: ndarray Derived roll data h: ndarray Derived heading data p_lf: ndarray Low-pass filtered pitch data r_lf: ndarray Low-pass filtered roll data h_lf: ndarray Low-pass filtered heading data ''' import numpy fig, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex='col') #rad2deg = lambda x: x*180/numpy.pi ax1.title.set_text('Pitch') ax1.plot(range(len(p)), p, color=_colors[0], linewidth=_linewidth, label='original') ax1.plot(range(len(p_lf)), p_lf, color=_colors[1], linewidth=_linewidth, label='filtered') ax2.title.set_text('Roll') ax2.plot(range(len(r)), r, color=_colors[2], linewidth=_linewidth, label='original') ax2.plot(range(len(r_lf)), r_lf, color=_colors[3], linewidth=_linewidth, label='filtered') ax3.title.set_text('Heading') ax3.plot(range(len(h)), h, color=_colors[4], linewidth=_linewidth, label='original') ax3.plot(range(len(h_lf)), h_lf, color=_colors[5], linewidth=_linewidth, label='filtered') plt.ylabel('Radians') plt.xlabel('Samples') for ax in [ax1, ax2, ax3]: ax.legend(loc="upper right") plt.show() return None
[ "def", "plot_prh_filtered", "(", "p", ",", "r", ",", "h", ",", "p_lf", ",", "r_lf", ",", "h_lf", ")", ":", "import", "numpy", "fig", ",", "(", "ax1", ",", "ax2", ",", "ax3", ")", "=", "plt", ".", "subplots", "(", "3", ",", "1", ",", "sharex", "=", "'col'", ")", "#rad2deg = lambda x: x*180/numpy.pi", "ax1", ".", "title", ".", "set_text", "(", "'Pitch'", ")", "ax1", ".", "plot", "(", "range", "(", "len", "(", "p", ")", ")", ",", "p", ",", "color", "=", "_colors", "[", "0", "]", ",", "linewidth", "=", "_linewidth", ",", "label", "=", "'original'", ")", "ax1", ".", "plot", "(", "range", "(", "len", "(", "p_lf", ")", ")", ",", "p_lf", ",", "color", "=", "_colors", "[", "1", "]", ",", "linewidth", "=", "_linewidth", ",", "label", "=", "'filtered'", ")", "ax2", ".", "title", ".", "set_text", "(", "'Roll'", ")", "ax2", ".", "plot", "(", "range", "(", "len", "(", "r", ")", ")", ",", "r", ",", "color", "=", "_colors", "[", "2", "]", ",", "linewidth", "=", "_linewidth", ",", "label", "=", "'original'", ")", "ax2", ".", "plot", "(", "range", "(", "len", "(", "r_lf", ")", ")", ",", "r_lf", ",", "color", "=", "_colors", "[", "3", "]", ",", "linewidth", "=", "_linewidth", ",", "label", "=", "'filtered'", ")", "ax3", ".", "title", ".", "set_text", "(", "'Heading'", ")", "ax3", ".", "plot", "(", "range", "(", "len", "(", "h", ")", ")", ",", "h", ",", "color", "=", "_colors", "[", "4", "]", ",", "linewidth", "=", "_linewidth", ",", "label", "=", "'original'", ")", "ax3", ".", "plot", "(", "range", "(", "len", "(", "h_lf", ")", ")", ",", "h_lf", ",", "color", "=", "_colors", "[", "5", "]", ",", "linewidth", "=", "_linewidth", ",", "label", "=", "'filtered'", ")", "plt", ".", "ylabel", "(", "'Radians'", ")", "plt", ".", "xlabel", "(", "'Samples'", ")", "for", "ax", "in", "[", "ax1", ",", "ax2", ",", "ax3", "]", ":", "ax", ".", "legend", "(", "loc", "=", "\"upper right\"", ")", "plt", ".", "show", "(", ")", "return", "None" ]
Plot original and low-pass filtered PRH data Args ---- p: ndarray Derived pitch data r: ndarray Derived roll data h: ndarray Derived heading data p_lf: ndarray Low-pass filtered pitch data r_lf: ndarray Low-pass filtered roll data h_lf: ndarray Low-pass filtered heading data
[ "Plot", "original", "and", "low", "-", "pass", "filtered", "PRH", "data" ]
816563a9c3feb3fa416f1c2921c6b75db34111ad
https://github.com/ryanjdillon/pyotelem/blob/816563a9c3feb3fa416f1c2921c6b75db34111ad/pyotelem/plots/plotdynamics.py#L61-L110
251,354
ryanjdillon/pyotelem
pyotelem/plots/plotdynamics.py
plot_swim_speed
def plot_swim_speed(exp_ind, swim_speed): '''Plot the swim speed during experimental indices Args ---- exp_ind: ndarray Indices of tag data where experiment is active swim_speed: ndarray Swim speed data at sensor sampling rate ''' import numpy fig, ax = plt.subplots() ax.title.set_text('Swim speed from depth change and pitch angle (m/s^2') ax.plot(exp_ind, swim_speed, linewidth=_linewidth, label='speed') ymax = numpy.ceil(swim_speed[~numpy.isnan(swim_speed)].max()) ax.set_ylim(0, ymax) ax.legend(loc='upper right') plt.show() return ax
python
def plot_swim_speed(exp_ind, swim_speed): '''Plot the swim speed during experimental indices Args ---- exp_ind: ndarray Indices of tag data where experiment is active swim_speed: ndarray Swim speed data at sensor sampling rate ''' import numpy fig, ax = plt.subplots() ax.title.set_text('Swim speed from depth change and pitch angle (m/s^2') ax.plot(exp_ind, swim_speed, linewidth=_linewidth, label='speed') ymax = numpy.ceil(swim_speed[~numpy.isnan(swim_speed)].max()) ax.set_ylim(0, ymax) ax.legend(loc='upper right') plt.show() return ax
[ "def", "plot_swim_speed", "(", "exp_ind", ",", "swim_speed", ")", ":", "import", "numpy", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", ")", "ax", ".", "title", ".", "set_text", "(", "'Swim speed from depth change and pitch angle (m/s^2'", ")", "ax", ".", "plot", "(", "exp_ind", ",", "swim_speed", ",", "linewidth", "=", "_linewidth", ",", "label", "=", "'speed'", ")", "ymax", "=", "numpy", ".", "ceil", "(", "swim_speed", "[", "~", "numpy", ".", "isnan", "(", "swim_speed", ")", "]", ".", "max", "(", ")", ")", "ax", ".", "set_ylim", "(", "0", ",", "ymax", ")", "ax", ".", "legend", "(", "loc", "=", "'upper right'", ")", "plt", ".", "show", "(", ")", "return", "ax" ]
Plot the swim speed during experimental indices Args ---- exp_ind: ndarray Indices of tag data where experiment is active swim_speed: ndarray Swim speed data at sensor sampling rate
[ "Plot", "the", "swim", "speed", "during", "experimental", "indices" ]
816563a9c3feb3fa416f1c2921c6b75db34111ad
https://github.com/ryanjdillon/pyotelem/blob/816563a9c3feb3fa416f1c2921c6b75db34111ad/pyotelem/plots/plotdynamics.py#L113-L135
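A minimal sketch of calling plot_swim_speed; the index array and speed values are synthetic illustration data.

import numpy
from pyotelem.plots import plotdynamics

exp_ind = numpy.arange(5000)                                          # experiment sample indices
swim_speed = numpy.abs(numpy.random.normal(1.0, 0.3, exp_ind.size))   # m/s, synthetic
ax = plotdynamics.plot_swim_speed(exp_ind, swim_speed)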
251,355
spookey/photon
photon/settings.py
Settings.load
def load(self, skey, sdesc, sdict=None, loaders=None, merge=False, writeback=False): ''' Loads a dictionary into current settings :param skey: Type of data to load. Is be used to reference the data \ in the files sections within settings :param sdesc: Either filename of yaml-file to load or further description of \ imported data when `sdict` is used :param dict sdict: Directly pass data as dictionary instead of loading \ it from a yaml-file. \ Make sure to set `skey` and `sdesc` accordingly :param list loaders: Append custom loaders to the YAML-loader. :param merge: Merge received data into current settings or \ place it under `skey` within meta :param writeback: Write back loaded (and merged/imported) result back \ to the original file. \ This is used to generate the summary files :returns: The loaded (or directly passed) content .. seealso:: |yaml_loaders| ''' y = sdict if sdict else read_yaml(sdesc, add_constructor=loaders) if y and isinstance(y, dict): if not sdict: self.__settings['files'].update({skey: sdesc}) if merge: self.__settings = dict_merge(self.__settings, y) else: self.__settings[skey] = y shell_notify( 'load %s data and %s it into settings' % ( 'got' if sdict else 'read', 'merged' if merge else 'imported' ), more=dict(skey=skey, sdesc=sdesc, merge=merge, writeback=writeback), verbose=self.__verbose ) if writeback and y != self.__settings: write_yaml(sdesc, self.__settings) return y
python
def load(self, skey, sdesc, sdict=None, loaders=None, merge=False, writeback=False): ''' Loads a dictionary into current settings :param skey: Type of data to load. Is be used to reference the data \ in the files sections within settings :param sdesc: Either filename of yaml-file to load or further description of \ imported data when `sdict` is used :param dict sdict: Directly pass data as dictionary instead of loading \ it from a yaml-file. \ Make sure to set `skey` and `sdesc` accordingly :param list loaders: Append custom loaders to the YAML-loader. :param merge: Merge received data into current settings or \ place it under `skey` within meta :param writeback: Write back loaded (and merged/imported) result back \ to the original file. \ This is used to generate the summary files :returns: The loaded (or directly passed) content .. seealso:: |yaml_loaders| ''' y = sdict if sdict else read_yaml(sdesc, add_constructor=loaders) if y and isinstance(y, dict): if not sdict: self.__settings['files'].update({skey: sdesc}) if merge: self.__settings = dict_merge(self.__settings, y) else: self.__settings[skey] = y shell_notify( 'load %s data and %s it into settings' % ( 'got' if sdict else 'read', 'merged' if merge else 'imported' ), more=dict(skey=skey, sdesc=sdesc, merge=merge, writeback=writeback), verbose=self.__verbose ) if writeback and y != self.__settings: write_yaml(sdesc, self.__settings) return y
[ "def", "load", "(", "self", ",", "skey", ",", "sdesc", ",", "sdict", "=", "None", ",", "loaders", "=", "None", ",", "merge", "=", "False", ",", "writeback", "=", "False", ")", ":", "y", "=", "sdict", "if", "sdict", "else", "read_yaml", "(", "sdesc", ",", "add_constructor", "=", "loaders", ")", "if", "y", "and", "isinstance", "(", "y", ",", "dict", ")", ":", "if", "not", "sdict", ":", "self", ".", "__settings", "[", "'files'", "]", ".", "update", "(", "{", "skey", ":", "sdesc", "}", ")", "if", "merge", ":", "self", ".", "__settings", "=", "dict_merge", "(", "self", ".", "__settings", ",", "y", ")", "else", ":", "self", ".", "__settings", "[", "skey", "]", "=", "y", "shell_notify", "(", "'load %s data and %s it into settings'", "%", "(", "'got'", "if", "sdict", "else", "'read'", ",", "'merged'", "if", "merge", "else", "'imported'", ")", ",", "more", "=", "dict", "(", "skey", "=", "skey", ",", "sdesc", "=", "sdesc", ",", "merge", "=", "merge", ",", "writeback", "=", "writeback", ")", ",", "verbose", "=", "self", ".", "__verbose", ")", "if", "writeback", "and", "y", "!=", "self", ".", "__settings", ":", "write_yaml", "(", "sdesc", ",", "self", ".", "__settings", ")", "return", "y" ]
Loads a dictionary into current settings :param skey: Type of data to load. It is used to reference the data \ in the files sections within settings :param sdesc: Either filename of yaml-file to load or further description of \ imported data when `sdict` is used :param dict sdict: Directly pass data as dictionary instead of loading \ it from a yaml-file. \ Make sure to set `skey` and `sdesc` accordingly :param list loaders: Append custom loaders to the YAML-loader. :param merge: Merge received data into current settings or \ place it under `skey` within meta :param writeback: Write the loaded (and merged/imported) result back \ to the original file. \ This is used to generate the summary files :returns: The loaded (or directly passed) content .. seealso:: |yaml_loaders|
[ "Loads", "a", "dictionary", "into", "current", "settings" ]
57212a26ce713ab7723910ee49e3d0ba1697799f
https://github.com/spookey/photon/blob/57212a26ce713ab7723910ee49e3d0ba1697799f/photon/settings.py#L134-L183
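A minimal usage sketch for Settings.load, assuming an existing Settings instance named settings (its construction is not part of the record above); the file path and keys are illustrative.

# Merge a YAML file into the current settings and write the merged result back.
data = settings.load('config', '/tmp/config.yaml', merge=True, writeback=True)

# Pass a dict directly instead of reading a file.
settings.load('defaults', 'built-in defaults', sdict={'debug': False})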
251,356
jmgilman/Neolib
neolib/pyamf/sol.py
encode
def encode(name, values, strict=True, encoding=pyamf.AMF0): """ Produces a SharedObject encoded stream based on the name and values. @param name: The root name of the SharedObject. @param values: A `dict` of name value pairs to be encoded in the stream. @param strict: Ensure that the SOL stream is as spec compatible as possible. @return: A SharedObject encoded stream. @rtype: L{BufferedByteStream<pyamf.util.BufferedByteStream>}, a file like object. """ encoder = pyamf.get_encoder(encoding) stream = encoder.stream # write the header stream.write(HEADER_VERSION) if strict: length_pos = stream.tell() stream.write_ulong(0) # write the signature stream.write(HEADER_SIGNATURE) # write the root name name = name.encode('utf-8') stream.write_ushort(len(name)) stream.write(name) # write the padding stream.write(PADDING_BYTE * 3) stream.write_uchar(encoding) for n, v in values.iteritems(): encoder.serialiseString(n) encoder.writeElement(v) # write the padding stream.write(PADDING_BYTE) if strict: stream.seek(length_pos) stream.write_ulong(stream.remaining() - 4) stream.seek(0) return stream
python
def encode(name, values, strict=True, encoding=pyamf.AMF0): """ Produces a SharedObject encoded stream based on the name and values. @param name: The root name of the SharedObject. @param values: A `dict` of name value pairs to be encoded in the stream. @param strict: Ensure that the SOL stream is as spec compatible as possible. @return: A SharedObject encoded stream. @rtype: L{BufferedByteStream<pyamf.util.BufferedByteStream>}, a file like object. """ encoder = pyamf.get_encoder(encoding) stream = encoder.stream # write the header stream.write(HEADER_VERSION) if strict: length_pos = stream.tell() stream.write_ulong(0) # write the signature stream.write(HEADER_SIGNATURE) # write the root name name = name.encode('utf-8') stream.write_ushort(len(name)) stream.write(name) # write the padding stream.write(PADDING_BYTE * 3) stream.write_uchar(encoding) for n, v in values.iteritems(): encoder.serialiseString(n) encoder.writeElement(v) # write the padding stream.write(PADDING_BYTE) if strict: stream.seek(length_pos) stream.write_ulong(stream.remaining() - 4) stream.seek(0) return stream
[ "def", "encode", "(", "name", ",", "values", ",", "strict", "=", "True", ",", "encoding", "=", "pyamf", ".", "AMF0", ")", ":", "encoder", "=", "pyamf", ".", "get_encoder", "(", "encoding", ")", "stream", "=", "encoder", ".", "stream", "# write the header", "stream", ".", "write", "(", "HEADER_VERSION", ")", "if", "strict", ":", "length_pos", "=", "stream", ".", "tell", "(", ")", "stream", ".", "write_ulong", "(", "0", ")", "# write the signature", "stream", ".", "write", "(", "HEADER_SIGNATURE", ")", "# write the root name", "name", "=", "name", ".", "encode", "(", "'utf-8'", ")", "stream", ".", "write_ushort", "(", "len", "(", "name", ")", ")", "stream", ".", "write", "(", "name", ")", "# write the padding", "stream", ".", "write", "(", "PADDING_BYTE", "*", "3", ")", "stream", ".", "write_uchar", "(", "encoding", ")", "for", "n", ",", "v", "in", "values", ".", "iteritems", "(", ")", ":", "encoder", ".", "serialiseString", "(", "n", ")", "encoder", ".", "writeElement", "(", "v", ")", "# write the padding", "stream", ".", "write", "(", "PADDING_BYTE", ")", "if", "strict", ":", "stream", ".", "seek", "(", "length_pos", ")", "stream", ".", "write_ulong", "(", "stream", ".", "remaining", "(", ")", "-", "4", ")", "stream", ".", "seek", "(", "0", ")", "return", "stream" ]
Produces a SharedObject encoded stream based on the name and values. @param name: The root name of the SharedObject. @param values: A `dict` of name value pairs to be encoded in the stream. @param strict: Ensure that the SOL stream is as spec compatible as possible. @return: A SharedObject encoded stream. @rtype: L{BufferedByteStream<pyamf.util.BufferedByteStream>}, a file like object.
[ "Produces", "a", "SharedObject", "encoded", "stream", "based", "on", "the", "name", "and", "values", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/sol.py#L85-L133
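A minimal sketch of writing a SharedObject with encode; the root name and values are made up, and the getvalue() call assumes the returned BufferedByteStream exposes the usual file-like getvalue().

from neolib.pyamf import sol

stream = sol.encode('userPrefs', {'volume': 0.8, 'muted': False})
with open('userPrefs.sol', 'wb') as f:
    f.write(stream.getvalue())   # assumption: BufferedByteStream provides getvalue()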
251,357
jamieleshaw/lurklib
lurklib/__init__.py
Client.mainloop
def mainloop(self): """ Handles events and calls their handler for infinity. """ while self.keep_going: with self.lock: if self.on_connect and not self.readable(2): self.on_connect() self.on_connect = None if not self.keep_going: break self.process_once()
python
def mainloop(self): """ Handles events and calls their handler for infinity. """ while self.keep_going: with self.lock: if self.on_connect and not self.readable(2): self.on_connect() self.on_connect = None if not self.keep_going: break self.process_once()
[ "def", "mainloop", "(", "self", ")", ":", "while", "self", ".", "keep_going", ":", "with", "self", ".", "lock", ":", "if", "self", ".", "on_connect", "and", "not", "self", ".", "readable", "(", "2", ")", ":", "self", ".", "on_connect", "(", ")", "self", ".", "on_connect", "=", "None", "if", "not", "self", ".", "keep_going", ":", "break", "self", ".", "process_once", "(", ")" ]
Handles events and calls their handlers indefinitely.
[ "Handles", "events", "and", "calls", "their", "handlers", "indefinitely", "." ]
a861f35d880140422103dd78ec3239814e85fd7e
https://github.com/jamieleshaw/lurklib/blob/a861f35d880140422103dd78ec3239814e85fd7e/lurklib/__init__.py#L86-L97
251,358
treycucco/bidon
bidon/spreadsheet/excel.py
ExcelWorksheet.parse_cell
def parse_cell(self, cell, coords, cell_mode=CellMode.cooked): """Parses a cell according to the cell.ctype.""" # pylint: disable=too-many-return-statements if cell_mode == CellMode.cooked: if cell.ctype == xlrd.XL_CELL_BLANK: return None if cell.ctype == xlrd.XL_CELL_BOOLEAN: return cell.value if cell.ctype == xlrd.XL_CELL_DATE: if self.handle_ambiguous_date: try: return self._parse_date(cell.value) except xlrd.xldate.XLDateAmbiguous: return self.handle_ambiguous_date(cell.value) else: return self._parse_date(cell.value) if cell.ctype == xlrd.XL_CELL_EMPTY: return None if cell.ctype == xlrd.XL_CELL_ERROR: return cell.value if cell.ctype == xlrd.XL_CELL_NUMBER: return cell.value if cell.ctype == xlrd.XL_CELL_TEXT: return cell.value raise ValueError("Unhandled cell type {0}".format(cell.ctype)) else: return cell
python
def parse_cell(self, cell, coords, cell_mode=CellMode.cooked): """Parses a cell according to the cell.ctype.""" # pylint: disable=too-many-return-statements if cell_mode == CellMode.cooked: if cell.ctype == xlrd.XL_CELL_BLANK: return None if cell.ctype == xlrd.XL_CELL_BOOLEAN: return cell.value if cell.ctype == xlrd.XL_CELL_DATE: if self.handle_ambiguous_date: try: return self._parse_date(cell.value) except xlrd.xldate.XLDateAmbiguous: return self.handle_ambiguous_date(cell.value) else: return self._parse_date(cell.value) if cell.ctype == xlrd.XL_CELL_EMPTY: return None if cell.ctype == xlrd.XL_CELL_ERROR: return cell.value if cell.ctype == xlrd.XL_CELL_NUMBER: return cell.value if cell.ctype == xlrd.XL_CELL_TEXT: return cell.value raise ValueError("Unhandled cell type {0}".format(cell.ctype)) else: return cell
[ "def", "parse_cell", "(", "self", ",", "cell", ",", "coords", ",", "cell_mode", "=", "CellMode", ".", "cooked", ")", ":", "# pylint: disable=too-many-return-statements", "if", "cell_mode", "==", "CellMode", ".", "cooked", ":", "if", "cell", ".", "ctype", "==", "xlrd", ".", "XL_CELL_BLANK", ":", "return", "None", "if", "cell", ".", "ctype", "==", "xlrd", ".", "XL_CELL_BOOLEAN", ":", "return", "cell", ".", "value", "if", "cell", ".", "ctype", "==", "xlrd", ".", "XL_CELL_DATE", ":", "if", "self", ".", "handle_ambiguous_date", ":", "try", ":", "return", "self", ".", "_parse_date", "(", "cell", ".", "value", ")", "except", "xlrd", ".", "xldate", ".", "XLDateAmbiguous", ":", "return", "self", ".", "handle_ambiguous_date", "(", "cell", ".", "value", ")", "else", ":", "return", "self", ".", "_parse_date", "(", "cell", ".", "value", ")", "if", "cell", ".", "ctype", "==", "xlrd", ".", "XL_CELL_EMPTY", ":", "return", "None", "if", "cell", ".", "ctype", "==", "xlrd", ".", "XL_CELL_ERROR", ":", "return", "cell", ".", "value", "if", "cell", ".", "ctype", "==", "xlrd", ".", "XL_CELL_NUMBER", ":", "return", "cell", ".", "value", "if", "cell", ".", "ctype", "==", "xlrd", ".", "XL_CELL_TEXT", ":", "return", "cell", ".", "value", "raise", "ValueError", "(", "\"Unhandled cell type {0}\"", ".", "format", "(", "cell", ".", "ctype", ")", ")", "else", ":", "return", "cell" ]
Parses a cell according to the cell.ctype.
[ "Parses", "a", "cell", "according", "to", "the", "cell", ".", "ctype", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/spreadsheet/excel.py#L22-L55
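A minimal sketch of using parse_cell to convert a whole sheet into plain Python values, assuming an ExcelWorksheet instance ws that wraps an xlrd sheet as raw_sheet (its construction is not shown in the record above).

rows = []
for rowx in range(ws.raw_sheet.nrows):
    # coords are passed as (col, row), matching the record's get_note convention
    rows.append([ws.parse_cell(ws.raw_sheet.cell(rowx, colx), (colx, rowx))
                 for colx in range(ws.raw_sheet.ncols)])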
251,359
treycucco/bidon
bidon/spreadsheet/excel.py
ExcelWorksheet.get_note
def get_note(self, coords): """Get the note for the cell at the given coordinates. coords is a tuple of (col, row) """ col, row = coords note = self.raw_sheet.cell_note_map.get((row, col)) return note.text if note else None
python
def get_note(self, coords): """Get the note for the cell at the given coordinates. coords is a tuple of (col, row) """ col, row = coords note = self.raw_sheet.cell_note_map.get((row, col)) return note.text if note else None
[ "def", "get_note", "(", "self", ",", "coords", ")", ":", "col", ",", "row", "=", "coords", "note", "=", "self", ".", "raw_sheet", ".", "cell_note_map", ".", "get", "(", "(", "row", ",", "col", ")", ")", "return", "note", ".", "text", "if", "note", "else", "None" ]
Get the note for the cell at the given coordinates. coords is a tuple of (col, row)
[ "Get", "the", "note", "for", "the", "cell", "at", "the", "given", "coordinates", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/spreadsheet/excel.py#L69-L76
251,360
treycucco/bidon
bidon/spreadsheet/excel.py
ExcelWorksheet._parse_date
def _parse_date(self, cell_value): """Attempts to parse a cell_value as a date.""" date_tuple = xlrd.xldate_as_tuple(cell_value, self.raw_sheet.book.datemode) return self.tuple_to_datetime(date_tuple)
python
def _parse_date(self, cell_value): """Attempts to parse a cell_value as a date.""" date_tuple = xlrd.xldate_as_tuple(cell_value, self.raw_sheet.book.datemode) return self.tuple_to_datetime(date_tuple)
[ "def", "_parse_date", "(", "self", ",", "cell_value", ")", ":", "date_tuple", "=", "xlrd", ".", "xldate_as_tuple", "(", "cell_value", ",", "self", ".", "raw_sheet", ".", "book", ".", "datemode", ")", "return", "self", ".", "tuple_to_datetime", "(", "date_tuple", ")" ]
Attempts to parse a cell_value as a date.
[ "Attempts", "to", "parse", "a", "cell_value", "as", "a", "date", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/spreadsheet/excel.py#L78-L81
251,361
itsnauman/termrule
tr/termrule.py
TermRule._parse_args
def _parse_args(cls): """ Method to parse command line arguments """ cls.parser = argparse.ArgumentParser() cls.parser.add_argument( "symbol", help="Symbol for horizontal line", nargs="*") cls.parser.add_argument( "--color", "-c", help="Color of the line", default=None, nargs=1) cls.parser.add_argument( "--version", "-v", action="version", version="0.13") return cls.parser
python
def _parse_args(cls): """ Method to parse command line arguments """ cls.parser = argparse.ArgumentParser() cls.parser.add_argument( "symbol", help="Symbol for horizontal line", nargs="*") cls.parser.add_argument( "--color", "-c", help="Color of the line", default=None, nargs=1) cls.parser.add_argument( "--version", "-v", action="version", version="0.13") return cls.parser
[ "def", "_parse_args", "(", "cls", ")", ":", "cls", ".", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "cls", ".", "parser", ".", "add_argument", "(", "\"symbol\"", ",", "help", "=", "\"Symbol for horizontal line\"", ",", "nargs", "=", "\"*\"", ")", "cls", ".", "parser", ".", "add_argument", "(", "\"--color\"", ",", "\"-c\"", ",", "help", "=", "\"Color of the line\"", ",", "default", "=", "None", ",", "nargs", "=", "1", ")", "cls", ".", "parser", ".", "add_argument", "(", "\"--version\"", ",", "\"-v\"", ",", "action", "=", "\"version\"", ",", "version", "=", "\"0.13\"", ")", "return", "cls", ".", "parser" ]
Method to parse command line arguments
[ "Method", "to", "parse", "command", "line", "arguments" ]
62b8cc7e9a7fc4476ccdaf84fe2685eb529dc48c
https://github.com/itsnauman/termrule/blob/62b8cc7e9a7fc4476ccdaf84fe2685eb529dc48c/tr/termrule.py#L45-L57
251,362
itsnauman/termrule
tr/termrule.py
TermRule.run_args
def run_args(self): """ Pass in the parsed args to the script """ self.arg_parser = self._parse_args() self.args = self.arg_parser.parse_args() color_name = self.args.color if color_name is not None: color_name = color_name[0] symbol = self.args.symbol try: self.tr(symbol, color_name) except InvalidColorException: print("Invalid Color Name!")
python
def run_args(self): """ Pass in the parsed args to the script """ self.arg_parser = self._parse_args() self.args = self.arg_parser.parse_args() color_name = self.args.color if color_name is not None: color_name = color_name[0] symbol = self.args.symbol try: self.tr(symbol, color_name) except InvalidColorException: print("Invalid Color Name!")
[ "def", "run_args", "(", "self", ")", ":", "self", ".", "arg_parser", "=", "self", ".", "_parse_args", "(", ")", "self", ".", "args", "=", "self", ".", "arg_parser", ".", "parse_args", "(", ")", "color_name", "=", "self", ".", "args", ".", "color", "if", "color_name", "is", "not", "None", ":", "color_name", "=", "color_name", "[", "0", "]", "symbol", "=", "self", ".", "args", ".", "symbol", "try", ":", "self", ".", "tr", "(", "symbol", ",", "color_name", ")", "except", "InvalidColorException", ":", "print", "(", "\"Invalid Color Name!\"", ")" ]
Pass in the parsed args to the script
[ "Pass", "in", "the", "parsed", "args", "to", "the", "script" ]
62b8cc7e9a7fc4476ccdaf84fe2685eb529dc48c
https://github.com/itsnauman/termrule/blob/62b8cc7e9a7fc4476ccdaf84fe2685eb529dc48c/tr/termrule.py#L59-L72
251,363
itsnauman/termrule
tr/termrule.py
TermRule._term_size
def _term_size(self): """ Method returns lines and columns according to terminal size """ for fd in (0, 1, 2): try: return self._ioctl_GWINSZ(fd) except: pass # try os.ctermid() try: fd = os.open(os.ctermid(), os.O_RDONLY) try: return self._ioctl_GWINSZ(fd) finally: os.close(fd) except: pass # try `stty size` try: return tuple(int(x) for x in os.popen("stty size", "r").read().split()) except: pass # try environment variables try: return tuple(int(os.getenv(var)) for var in ("LINES", "COLUMNS")) except: pass # i give up. return default. return (25, 80)
python
def _term_size(self): """ Method returns lines and columns according to terminal size """ for fd in (0, 1, 2): try: return self._ioctl_GWINSZ(fd) except: pass # try os.ctermid() try: fd = os.open(os.ctermid(), os.O_RDONLY) try: return self._ioctl_GWINSZ(fd) finally: os.close(fd) except: pass # try `stty size` try: return tuple(int(x) for x in os.popen("stty size", "r").read().split()) except: pass # try environment variables try: return tuple(int(os.getenv(var)) for var in ("LINES", "COLUMNS")) except: pass # i give up. return default. return (25, 80)
[ "def", "_term_size", "(", "self", ")", ":", "for", "fd", "in", "(", "0", ",", "1", ",", "2", ")", ":", "try", ":", "return", "self", ".", "_ioctl_GWINSZ", "(", "fd", ")", "except", ":", "pass", "# try os.ctermid()", "try", ":", "fd", "=", "os", ".", "open", "(", "os", ".", "ctermid", "(", ")", ",", "os", ".", "O_RDONLY", ")", "try", ":", "return", "self", ".", "_ioctl_GWINSZ", "(", "fd", ")", "finally", ":", "os", ".", "close", "(", "fd", ")", "except", ":", "pass", "# try `stty size`", "try", ":", "return", "tuple", "(", "int", "(", "x", ")", "for", "x", "in", "os", ".", "popen", "(", "\"stty size\"", ",", "\"r\"", ")", ".", "read", "(", ")", ".", "split", "(", ")", ")", "except", ":", "pass", "# try environment variables", "try", ":", "return", "tuple", "(", "int", "(", "os", ".", "getenv", "(", "var", ")", ")", "for", "var", "in", "(", "\"LINES\"", ",", "\"COLUMNS\"", ")", ")", "except", ":", "pass", "# i give up. return default.", "return", "(", "25", ",", "80", ")" ]
Method returns lines and columns according to terminal size
[ "Method", "returns", "lines", "and", "columns", "according", "to", "terminal", "size" ]
62b8cc7e9a7fc4476ccdaf84fe2685eb529dc48c
https://github.com/itsnauman/termrule/blob/62b8cc7e9a7fc4476ccdaf84fe2685eb529dc48c/tr/termrule.py#L77-L106
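A sketch of the ioctl-based probe that the _ioctl_GWINSZ helper presumably wraps (the helper itself is not part of the record above); this is the standard TIOCGWINSZ recipe, not the library's own code.

import fcntl
import struct
import termios

def gwinsz(fd):
    # Ask the terminal driver for (rows, cols) on the given file descriptor.
    rows, cols = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, b'\x00' * 4))
    return rows, cols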
251,364
itsnauman/termrule
tr/termrule.py
TermRule.tr
def tr(self, args, color=None): """ Method to print ASCII patterns to terminal """ width = self._term_size()[1] if not args: if color is not None: print(self._echo("#" * width, color)) else: print(self._echo("#" * width, "green")) else: for each_symbol in args: chars = len(each_symbol) number_chars = width // chars if color is not None: print(self._echo(each_symbol * number_chars, color)) else: print(each_symbol * number_chars)
python
def tr(self, args, color=None): """ Method to print ASCII patterns to terminal """ width = self._term_size()[1] if not args: if color is not None: print(self._echo("#" * width, color)) else: print(self._echo("#" * width, "green")) else: for each_symbol in args: chars = len(each_symbol) number_chars = width // chars if color is not None: print(self._echo(each_symbol * number_chars, color)) else: print(each_symbol * number_chars)
[ "def", "tr", "(", "self", ",", "args", ",", "color", "=", "None", ")", ":", "width", "=", "self", ".", "_term_size", "(", ")", "[", "1", "]", "if", "not", "args", ":", "if", "color", "is", "not", "None", ":", "print", "(", "self", ".", "_echo", "(", "\"#\"", "*", "width", ",", "color", ")", ")", "else", ":", "print", "(", "self", ".", "_echo", "(", "\"#\"", "*", "width", ",", "\"green\"", ")", ")", "else", ":", "for", "each_symbol", "in", "args", ":", "chars", "=", "len", "(", "each_symbol", ")", "number_chars", "=", "width", "//", "chars", "if", "color", "is", "not", "None", ":", "print", "(", "self", ".", "_echo", "(", "each_symbol", "*", "number_chars", ",", "color", ")", ")", "else", ":", "print", "(", "each_symbol", "*", "number_chars", ")" ]
Method to print ASCII patterns to terminal
[ "Method", "to", "print", "ASCII", "patterns", "to", "terminal" ]
62b8cc7e9a7fc4476ccdaf84fe2685eb529dc48c
https://github.com/itsnauman/termrule/blob/62b8cc7e9a7fc4476ccdaf84fe2685eb529dc48c/tr/termrule.py#L108-L125
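A minimal usage sketch for tr, assuming an existing TermRule instance named term (its construction is not shown in the record above).

term.tr(['='], 'red')   # full-width rule of '=' characters in red
term.tr([])             # no symbols given: falls back to a green '#' rule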
251,365
heikomuller/sco-client
scocli/__init__.py
SCOClient.cache_add
def cache_add(self, resource_url, cache_id): """Add entry permanently to local cache. Parameters ---------- resource_url : string Resource Url cache_id : string Unique cache identifier for resource """ # Add entry to cache index self.cache[resource_url] = cache_id # Write cache index content to database file with open(self.db_file, 'w') as f: for resource in self.cache: f.write(resource + '\t' + self.cache[resource] + '\n')
python
def cache_add(self, resource_url, cache_id): """Add entry permanently to local cache. Parameters ---------- resource_url : string Resource Url cache_id : string Unique cache identifier for resource """ # Add entry to cache index self.cache[resource_url] = cache_id # Write cache index content to database file with open(self.db_file, 'w') as f: for resource in self.cache: f.write(resource + '\t' + self.cache[resource] + '\n')
[ "def", "cache_add", "(", "self", ",", "resource_url", ",", "cache_id", ")", ":", "# Add entry to cache index", "self", ".", "cache", "[", "resource_url", "]", "=", "cache_id", "# Write cache index content to database file", "with", "open", "(", "self", ".", "db_file", ",", "'w'", ")", "as", "f", ":", "for", "resource", "in", "self", ".", "cache", ":", "f", ".", "write", "(", "resource", "+", "'\\t'", "+", "self", ".", "cache", "[", "resource", "]", "+", "'\\n'", ")" ]
Add entry permanently to local cache. Parameters ---------- resource_url : string Resource Url cache_id : string Unique cache identifier for resource
[ "Add", "entry", "permanently", "to", "local", "cache", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L109-L124
251,366
heikomuller/sco-client
scocli/__init__.py
SCOClient.cache_clear
def cache_clear(self): """Clear local cache by deleting all cached resources and their downloaded files. """ # Delete content of local cache directory for f in os.listdir(self.directory): f = os.path.join(self.directory, f) if os.path.isfile(f): os.remove(f) elif os.path.isdir(f): shutil.rmtree(f) # Empty cache index self.cache = {}
python
def cache_clear(self): """Clear local cache by deleting all cached resources and their downloaded files. """ # Delete content of local cache directory for f in os.listdir(self.directory): f = os.path.join(self.directory, f) if os.path.isfile(f): os.remove(f) elif os.path.isdir(f): shutil.rmtree(f) # Empty cache index self.cache = {}
[ "def", "cache_clear", "(", "self", ")", ":", "# Delete content of local cache directory", "for", "f", "in", "os", ".", "listdir", "(", "self", ".", "directory", ")", ":", "f", "=", "os", ".", "path", ".", "join", "(", "self", ".", "directory", ",", "f", ")", "if", "os", ".", "path", ".", "isfile", "(", "f", ")", ":", "os", ".", "remove", "(", "f", ")", "elif", "os", ".", "path", ".", "isdir", "(", "f", ")", ":", "shutil", ".", "rmtree", "(", "f", ")", "# Empty cache index", "self", ".", "cache", "=", "{", "}" ]
Clear local cache by deleting all cached resources and their downloaded files.
[ "Clear", "local", "cache", "by", "deleting", "all", "cached", "resources", "and", "their", "downloaded", "files", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L126-L138
251,367
heikomuller/sco-client
scocli/__init__.py
SCOClient.get_api_references
def get_api_references(self, api_url=None): """Get set of HATEOAS reference for the given SCO-API. Use the default SCO-API if none is given. References are cached as they are not expected to change. Parameters ---------- Returns ------- """ # Get subject listing Url for SCO-API if not api_url is None: url = api_url else: url = self.api_url # Check if API references are in local cache. If not send GET request # and add the result to the local cache if not url in self.apis: self.apis[url] = sco.references_to_dict( sco.JsonResource(url).json[sco.REF_LINKS] ) return self.apis[url]
python
def get_api_references(self, api_url=None): """Get set of HATEOAS reference for the given SCO-API. Use the default SCO-API if none is given. References are cached as they are not expected to change. Parameters ---------- Returns ------- """ # Get subject listing Url for SCO-API if not api_url is None: url = api_url else: url = self.api_url # Check if API references are in local cache. If not send GET request # and add the result to the local cache if not url in self.apis: self.apis[url] = sco.references_to_dict( sco.JsonResource(url).json[sco.REF_LINKS] ) return self.apis[url]
[ "def", "get_api_references", "(", "self", ",", "api_url", "=", "None", ")", ":", "# Get subject listing Url for SCO-API", "if", "not", "api_url", "is", "None", ":", "url", "=", "api_url", "else", ":", "url", "=", "self", ".", "api_url", "# Check if API references are in local cache. If not send GET request", "# and add the result to the local cache", "if", "not", "url", "in", "self", ".", "apis", ":", "self", ".", "apis", "[", "url", "]", "=", "sco", ".", "references_to_dict", "(", "sco", ".", "JsonResource", "(", "url", ")", ".", "json", "[", "sco", ".", "REF_LINKS", "]", ")", "return", "self", ".", "apis", "[", "url", "]" ]
Get the set of HATEOAS references for the given SCO-API. Use the default SCO-API if none is given. References are cached as they are not expected to change. Parameters ---------- Returns -------
[ "Get", "the", "set", "of", "HATEOAS", "references", "for", "the", "given", "SCO", "-", "API", ".", "Use", "the", "default", "SCO", "-", "API", "if", "none", "is", "given", ".", "References", "are", "cached", "as", "they", "are", "not", "expected", "to", "change", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L140-L162
251,368
heikomuller/sco-client
scocli/__init__.py
SCOClient.experiments_create
def experiments_create(self, name, subject_id, image_group_id, api_url=None, properties=None): """Create a new experiment at the given SCO-API. Subject and image group reference existing resources at the SCO-API. Parameters ---------- name : string User-defined name for experiment subject_id : string Unique identifier for subject at given SCO-API image_group_id : string Unique identifier for image group at given SCO-API api_url : string, optional Base Url of SCO-API where experiment will be created properties : Dictionary, optional Set of additional properties for created experiment. The given experiment name will override an existing name property in this set. Returns ------- scoserv.ExperimentHandle Handle for local copy of created experiment resource """ # Create experiment and return handle for created resource return self.experiments_get( ExperimentHandle.create( self.get_api_references(api_url)[sco.REF_EXPERIMENTS_CREATE], name, subject_id, image_group_id, properties=properties ) )
python
def experiments_create(self, name, subject_id, image_group_id, api_url=None, properties=None): """Create a new experiment at the given SCO-API. Subject and image group reference existing resources at the SCO-API. Parameters ---------- name : string User-defined name for experiment subject_id : string Unique identifier for subject at given SCO-API image_group_id : string Unique identifier for image group at given SCO-API api_url : string, optional Base Url of SCO-API where experiment will be created properties : Dictionary, optional Set of additional properties for created experiment. The given experiment name will override an existing name property in this set. Returns ------- scoserv.ExperimentHandle Handle for local copy of created experiment resource """ # Create experiment and return handle for created resource return self.experiments_get( ExperimentHandle.create( self.get_api_references(api_url)[sco.REF_EXPERIMENTS_CREATE], name, subject_id, image_group_id, properties=properties ) )
[ "def", "experiments_create", "(", "self", ",", "name", ",", "subject_id", ",", "image_group_id", ",", "api_url", "=", "None", ",", "properties", "=", "None", ")", ":", "# Create experiment and return handle for created resource", "return", "self", ".", "experiments_get", "(", "ExperimentHandle", ".", "create", "(", "self", ".", "get_api_references", "(", "api_url", ")", "[", "sco", ".", "REF_EXPERIMENTS_CREATE", "]", ",", "name", ",", "subject_id", ",", "image_group_id", ",", "properties", "=", "properties", ")", ")" ]
Create a new experiment at the given SCO-API. Subject and image group reference existing resources at the SCO-API. Parameters ---------- name : string User-defined name for experiment subject_id : string Unique identifier for subject at given SCO-API image_group_id : string Unique identifier for image group at given SCO-API api_url : string, optional Base Url of SCO-API where experiment will be created properties : Dictionary, optional Set of additional properties for created experiment. The given experiment name will override an existing name property in this set. Returns ------- scoserv.ExperimentHandle Handle for local copy of created experiment resource
[ "Create", "a", "new", "experiment", "at", "the", "given", "SCO", "-", "API", ".", "Subject", "and", "image", "group", "reference", "existing", "resources", "at", "the", "SCO", "-", "API", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L168-L200
251,369
heikomuller/sco-client
scocli/__init__.py
SCOClient.experiments_get
def experiments_get(self, resource_url): """Get handle for experiment resource at given Url. Parameters ---------- resource_url : string Url for experiment resource at SCO-API Returns ------- scoserv.ExperimentHandle Handle for local copy of experiment resource """ # Get resource directory, Json representation, active flag, and cache id obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url) # Create experiment handle. Will raise an exception if resource is not # in cache and cannot be downloaded. experiment = ExperimentHandle(obj_json, self) # Add resource to cache if not exists if not cache_id in self.cache: self.cache_add(resource_url, cache_id) # Return experiment handle return experiment
python
def experiments_get(self, resource_url): """Get handle for experiment resource at given Url. Parameters ---------- resource_url : string Url for experiment resource at SCO-API Returns ------- scoserv.ExperimentHandle Handle for local copy of experiment resource """ # Get resource directory, Json representation, active flag, and cache id obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url) # Create experiment handle. Will raise an exception if resource is not # in cache and cannot be downloaded. experiment = ExperimentHandle(obj_json, self) # Add resource to cache if not exists if not cache_id in self.cache: self.cache_add(resource_url, cache_id) # Return experiment handle return experiment
[ "def", "experiments_get", "(", "self", ",", "resource_url", ")", ":", "# Get resource directory, Json representation, active flag, and cache id", "obj_dir", ",", "obj_json", ",", "is_active", ",", "cache_id", "=", "self", ".", "get_object", "(", "resource_url", ")", "# Create experiment handle. Will raise an exception if resource is not", "# in cache and cannot be downloaded.", "experiment", "=", "ExperimentHandle", "(", "obj_json", ",", "self", ")", "# Add resource to cache if not exists", "if", "not", "cache_id", "in", "self", ".", "cache", ":", "self", ".", "cache_add", "(", "resource_url", ",", "cache_id", ")", "# Return experiment handle", "return", "experiment" ]
Get handle for experiment resource at given Url. Parameters ---------- resource_url : string Url for experiment resource at SCO-API Returns ------- scoserv.ExperimentHandle Handle for local copy of experiment resource
[ "Get", "handle", "for", "experiment", "resource", "at", "given", "Url", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L202-L224
251,370
heikomuller/sco-client
scocli/__init__.py
SCOClient.experiments_fmri_create
def experiments_fmri_create(self, experiment_url, data_file): """Upload given data file as fMRI for experiment with given Url. Parameters ---------- experiment_url : string Url for experiment resource data_file: Abs. Path to file on disk Functional data file Returns ------- scoserv.FunctionalDataHandle Handle to created fMRI resource """ # Get the experiment experiment = self.experiments_get(experiment_url) # Upload data FunctionalDataHandle.create( experiment.links[sco.REF_EXPERIMENTS_FMRI_CREATE], data_file ) # Get new fmri data handle and return it return self.experiments_get(experiment_url).fmri_data
python
def experiments_fmri_create(self, experiment_url, data_file): """Upload given data file as fMRI for experiment with given Url. Parameters ---------- experiment_url : string Url for experiment resource data_file: Abs. Path to file on disk Functional data file Returns ------- scoserv.FunctionalDataHandle Handle to created fMRI resource """ # Get the experiment experiment = self.experiments_get(experiment_url) # Upload data FunctionalDataHandle.create( experiment.links[sco.REF_EXPERIMENTS_FMRI_CREATE], data_file ) # Get new fmri data handle and return it return self.experiments_get(experiment_url).fmri_data
[ "def", "experiments_fmri_create", "(", "self", ",", "experiment_url", ",", "data_file", ")", ":", "# Get the experiment", "experiment", "=", "self", ".", "experiments_get", "(", "experiment_url", ")", "# Upload data", "FunctionalDataHandle", ".", "create", "(", "experiment", ".", "links", "[", "sco", ".", "REF_EXPERIMENTS_FMRI_CREATE", "]", ",", "data_file", ")", "# Get new fmri data handle and return it", "return", "self", ".", "experiments_get", "(", "experiment_url", ")", ".", "fmri_data" ]
Upload given data file as fMRI for experiment with given Url. Parameters ---------- experiment_url : string Url for experiment resource data_file: Abs. Path to file on disk Functional data file Returns ------- scoserv.FunctionalDataHandle Handle to created fMRI resource
[ "Upload", "given", "data", "file", "as", "fMRI", "for", "experiment", "with", "given", "Url", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L259-L282
251,371
heikomuller/sco-client
scocli/__init__.py
SCOClient.experiments_fmri_get
def experiments_fmri_get(self, resource_url): """Get handle for functional fMRI resource at given Url. Parameters ---------- resource_url : string Url for fMRI resource at SCO-API Returns ------- scoserv.FunctionalDataHandle Handle for functional MRI data resource """ # Get resource directory, Json representation, active flag, and cache id obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url) # Create image group handle. Will raise an exception if resource is not # in cache and cannot be downloaded. fmri_data = FunctionalDataHandle(obj_json, obj_dir) # Add resource to cache if not exists if not cache_id in self.cache: self.cache_add(resource_url, cache_id) # Return functional data handle return fmri_data
python
def experiments_fmri_get(self, resource_url): """Get handle for functional fMRI resource at given Url. Parameters ---------- resource_url : string Url for fMRI resource at SCO-API Returns ------- scoserv.FunctionalDataHandle Handle for functional MRI data resource """ # Get resource directory, Json representation, active flag, and cache id obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url) # Create image group handle. Will raise an exception if resource is not # in cache and cannot be downloaded. fmri_data = FunctionalDataHandle(obj_json, obj_dir) # Add resource to cache if not exists if not cache_id in self.cache: self.cache_add(resource_url, cache_id) # Return functional data handle return fmri_data
[ "def", "experiments_fmri_get", "(", "self", ",", "resource_url", ")", ":", "# Get resource directory, Json representation, active flag, and cache id", "obj_dir", ",", "obj_json", ",", "is_active", ",", "cache_id", "=", "self", ".", "get_object", "(", "resource_url", ")", "# Create image group handle. Will raise an exception if resource is not", "# in cache and cannot be downloaded.", "fmri_data", "=", "FunctionalDataHandle", "(", "obj_json", ",", "obj_dir", ")", "# Add resource to cache if not exists", "if", "not", "cache_id", "in", "self", ".", "cache", ":", "self", ".", "cache_add", "(", "resource_url", ",", "cache_id", ")", "# Return functional data handle", "return", "fmri_data" ]
Get handle for functional fMRI resource at given Url. Parameters ---------- resource_url : string Url for fMRI resource at SCO-API Returns ------- scoserv.FunctionalDataHandle Handle for functional MRI data resource
[ "Get", "handle", "for", "functional", "fMRI", "resource", "at", "given", "Url", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L284-L306
251,372
heikomuller/sco-client
scocli/__init__.py
SCOClient.experiments_predictions_create
def experiments_predictions_create(self, model_id, name, api_url, arguments={}, properties=None): """Create a new model run at the given SCO-API. Parameters ---------- model_id : string Unique model identifier name : string User-defined name for model run api_url : string Url to POST create model run request arguments : Dictionary Dictionary of arguments for model run properties : Dictionary, optional Set of additional properties for created model run. Returns ------- scoserv.ModelRunHandle Handle for local copy of created model run resource """ # Create experiment and return handle for created resource return self.experiments_predictions_get( ModelRunHandle.create( api_url, model_id, name, arguments, properties=properties ) )
python
def experiments_predictions_create(self, model_id, name, api_url, arguments={}, properties=None): """Create a new model run at the given SCO-API. Parameters ---------- model_id : string Unique model identifier name : string User-defined name for model run api_url : string Url to POST create model run request arguments : Dictionary Dictionary of arguments for model run properties : Dictionary, optional Set of additional properties for created model run. Returns ------- scoserv.ModelRunHandle Handle for local copy of created model run resource """ # Create experiment and return handle for created resource return self.experiments_predictions_get( ModelRunHandle.create( api_url, model_id, name, arguments, properties=properties ) )
[ "def", "experiments_predictions_create", "(", "self", ",", "model_id", ",", "name", ",", "api_url", ",", "arguments", "=", "{", "}", ",", "properties", "=", "None", ")", ":", "# Create experiment and return handle for created resource", "return", "self", ".", "experiments_predictions_get", "(", "ModelRunHandle", ".", "create", "(", "api_url", ",", "model_id", ",", "name", ",", "arguments", ",", "properties", "=", "properties", ")", ")" ]
Create a new model run at the given SCO-API. Parameters ---------- model_id : string Unique model identifier name : string User-defined name for model run api_url : string Url to POST create model run request arguments : Dictionary Dictionary of arguments for model run properties : Dictionary, optional Set of additional properties for created model run. Returns ------- scoserv.ModelRunHandle Handle for local copy of created model run resource
[ "Create", "a", "new", "model", "run", "at", "the", "given", "SCO", "-", "API", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L312-L342
251,373
heikomuller/sco-client
scocli/__init__.py
SCOClient.experiments_predictions_get
def experiments_predictions_get(self, resource_url): """Get handle for model run resource at given Url. Parameters ---------- resource_url : string Url for model run resource at SCO-API Returns ------- scoserv.ModelRunHandle Handle for local copy of model run resource """ # Get resource directory, Json representation, active flag, and cache id obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url) # Create model run handle. Will raise an exception if resource is not # in cache and cannot be downloaded. run = ModelRunHandle(obj_json, obj_dir, self) # Add resource to cache if not exists if not cache_id in self.cache: self.cache_add(resource_url, cache_id) # Return model run handle return run
python
def experiments_predictions_get(self, resource_url): """Get handle for model run resource at given Url. Parameters ---------- resource_url : string Url for model run resource at SCO-API Returns ------- scoserv.ModelRunHandle Handle for local copy of model run resource """ # Get resource directory, Json representation, active flag, and cache id obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url) # Create model run handle. Will raise an exception if resource is not # in cache and cannot be downloaded. run = ModelRunHandle(obj_json, obj_dir, self) # Add resource to cache if not exists if not cache_id in self.cache: self.cache_add(resource_url, cache_id) # Return model run handle return run
[ "def", "experiments_predictions_get", "(", "self", ",", "resource_url", ")", ":", "# Get resource directory, Json representation, active flag, and cache id", "obj_dir", ",", "obj_json", ",", "is_active", ",", "cache_id", "=", "self", ".", "get_object", "(", "resource_url", ")", "# Create model run handle. Will raise an exception if resource is not", "# in cache and cannot be downloaded.", "run", "=", "ModelRunHandle", "(", "obj_json", ",", "obj_dir", ",", "self", ")", "# Add resource to cache if not exists", "if", "not", "cache_id", "in", "self", ".", "cache", ":", "self", ".", "cache_add", "(", "resource_url", ",", "cache_id", ")", "# Return model run handle", "return", "run" ]
Get handle for model run resource at given Url. Parameters ---------- resource_url : string Url for model run resource at SCO-API Returns ------- scoserv.ModelRunHandle Handle for local copy of model run resource
[ "Get", "handle", "for", "model", "run", "resource", "at", "given", "Url", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L344-L366
251,374
heikomuller/sco-client
scocli/__init__.py
SCOClient.get_object
def get_object(self, resource_url): """Get remote resource information. Creates a local directory for the resource if this is the first access to the resource. Downloads the resource Json representation and writes it into a .json file in the cache directory. Raises ValueError if resource is not cached and does not exist. If the resource no longer exists on the server but in the local cache, a reference to the local copy is returned and the value of the is_active flag is False. Parameters ---------- resource_url : string Url of the resource Returns ------- (string, Json, Boolean, string) Returns a 4-tuple containing local resource directory, the Json object representing the resource, an active flag indicating if the resource still exists on the remote server or only in the local cache, and the resource unique cache identifier. """ # Check if resource is in local cache. If not, create a new cache # identifier and set is_cached flag to false if resource_url in self.cache: cache_id = self.cache[resource_url] else: cache_id = str(uuid.uuid4()) # The local cache directory for resource is given by cache identifier obj_dir = os.path.join(self.directory, cache_id) # File for local copy of object's Json representation f_json = os.path.join(obj_dir, '.json') # Object active flag is_active = True # Read the remote resource representation try: obj_json = sco.JsonResource(resource_url).json # Save local copy of Json object. Create local resource directory if # it doesn't exist if not os.path.isdir(obj_dir): os.mkdir(obj_dir) with open(f_json, 'w') as f: json.dump(obj_json, f) except ValueError as ex: # If the resource does not exist but we have a local copy then read # object from local disk. Set is_active flag to false. Raise # ValueError if no local copy exists if os.path.isfile(f_json): with open(f_json, 'r') as f: obj_json = json.load(f) is_active = False else: raise ex # Return object directory, Json, active flag, and cache identifier return obj_dir, obj_json, is_active, cache_id
python
def get_object(self, resource_url): """Get remote resource information. Creates a local directory for the resource if this is the first access to the resource. Downloads the resource Json representation and writes it into a .json file in the cache directory. Raises ValueError if resource is not cached and does not exist. If the resource no longer exists on the server but in the local cache, a reference to the local copy is returned and the value of the is_active flag is False. Parameters ---------- resource_url : string Url of the resource Returns ------- (string, Json, Boolean, string) Returns a 4-tuple containing local resource directory, the Json object representing the resource, an active flag indicating if the resource still exists on the remote server or only in the local cache, and the resource unique cache identifier. """ # Check if resource is in local cache. If not, create a new cache # identifier and set is_cached flag to false if resource_url in self.cache: cache_id = self.cache[resource_url] else: cache_id = str(uuid.uuid4()) # The local cache directory for resource is given by cache identifier obj_dir = os.path.join(self.directory, cache_id) # File for local copy of object's Json representation f_json = os.path.join(obj_dir, '.json') # Object active flag is_active = True # Read the remote resource representation try: obj_json = sco.JsonResource(resource_url).json # Save local copy of Json object. Create local resource directory if # it doesn't exist if not os.path.isdir(obj_dir): os.mkdir(obj_dir) with open(f_json, 'w') as f: json.dump(obj_json, f) except ValueError as ex: # If the resource does not exist but we have a local copy then read # object from local disk. Set is_active flag to false. Raise # ValueError if no local copy exists if os.path.isfile(f_json): with open(f_json, 'r') as f: obj_json = json.load(f) is_active = False else: raise ex # Return object directory, Json, active flag, and cache identifier return obj_dir, obj_json, is_active, cache_id
[ "def", "get_object", "(", "self", ",", "resource_url", ")", ":", "# Check if resource is in local cache. If not, create a new cache", "# identifier and set is_cached flag to false", "if", "resource_url", "in", "self", ".", "cache", ":", "cache_id", "=", "self", ".", "cache", "[", "resource_url", "]", "else", ":", "cache_id", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "# The local cahce directory for resource is given by cache identifier", "obj_dir", "=", "os", ".", "path", ".", "join", "(", "self", ".", "directory", ",", "cache_id", ")", "# File for local copy of object's Json representation", "f_json", "=", "os", ".", "path", ".", "join", "(", "obj_dir", ",", "'.json'", ")", "# Object active flag", "is_active", "=", "True", "# Read the remote resource representation", "try", ":", "obj_json", "=", "sco", ".", "JsonResource", "(", "resource_url", ")", ".", "json", "# Save local copy of Json object. Create local resource directory if", "# it doesn't exist", "if", "not", "os", ".", "path", ".", "isdir", "(", "obj_dir", ")", ":", "os", ".", "mkdir", "(", "obj_dir", ")", "with", "open", "(", "f_json", ",", "'w'", ")", "as", "f", ":", "json", ".", "dump", "(", "obj_json", ",", "f", ")", "except", "ValueError", "as", "ex", ":", "# If the resource does not exists but we have a local copy then read", "# object from local disk. Set is_active flag to false. Raise", "# ValueError if no local copy exists", "if", "os", ".", "path", ".", "isfile", "(", "f_json", ")", ":", "with", "open", "(", "f_json", ",", "'r'", ")", "as", "f", ":", "obj_json", "=", "json", ".", "load", "(", "f", ")", "is_active", "=", "False", "else", ":", "raise", "ex", "# Return object directory, Json, active flag, and cache identifier", "return", "obj_dir", ",", "obj_json", ",", "is_active", ",", "cache_id" ]
Get remote resource information. Creates a local directory for the resource if this is the first access to the resource. Downloads the resource Json representation and writes it into a .json file in the cache directory. Raises ValueError if resource is not cached and does not exist. If the resource no longer exists on the server but in the local cache, a reference to the local copy is returned and the value of the is_active flag is False. Parameters ---------- resource_url : string Url of the resource Returns ------- (string, Json, Boolean, string) Returns a 4-tuple containing local resource directory, the Json object representing the resource, an active flag indicating if the resource still exists on the remote server or only in the local cache, and the resource unique cache identifier.
[ "Get", "remote", "resource", "information", ".", "Creates", "a", "local", "directory", "for", "the", "resource", "if", "this", "is", "the", "first", "access", "to", "the", "resource", ".", "Downloads", "the", "resource", "Json", "representation", "and", "writes", "it", "into", "a", ".", "json", "file", "in", "the", "cache", "directory", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L459-L517
251,375
heikomuller/sco-client
scocli/__init__.py
SCOClient.image_groups_get
def image_groups_get(self, resource_url): """Get handle for image group resource at given Url. Parameters ---------- resource_url : string Url for image group resource at SCO-API Returns ------- scoserv.ImageGroupHandle Handle for local copy of image group resource """ # Get resource directory, Json representation, active flag, and cache id obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url) # Create image group handle. Will raise an exception if resource is not # in cache and cannot be downloaded. image_group = ImageGroupHandle(obj_json, obj_dir) # Add resource to cache if not exists if not cache_id in self.cache: self.cache_add(resource_url, cache_id) # Return image group handle return image_group
python
def image_groups_get(self, resource_url): """Get handle for image group resource at given Url. Parameters ---------- resource_url : string Url for image group resource at SCO-API Returns ------- scoserv.ImageGroupHandle Handle for local copy of image group resource """ # Get resource directory, Json representation, active flag, and cache id obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url) # Create image group handle. Will raise an exception if resource is not # in cache and cannot be downloaded. image_group = ImageGroupHandle(obj_json, obj_dir) # Add resource to cache if not exists if not cache_id in self.cache: self.cache_add(resource_url, cache_id) # Return image group handle return image_group
[ "def", "image_groups_get", "(", "self", ",", "resource_url", ")", ":", "# Get resource directory, Json representation, active flag, and cache id", "obj_dir", ",", "obj_json", ",", "is_active", ",", "cache_id", "=", "self", ".", "get_object", "(", "resource_url", ")", "# Create image group handle. Will raise an exception if resource is not", "# in cache and cannot be downloaded.", "image_group", "=", "ImageGroupHandle", "(", "obj_json", ",", "obj_dir", ")", "# Add resource to cache if not exists", "if", "not", "cache_id", "in", "self", ".", "cache", ":", "self", ".", "cache_add", "(", "resource_url", ",", "cache_id", ")", "# Return image group handle", "return", "image_group" ]
Get handle for image group resource at given Url. Parameters ---------- resource_url : string Url for image group resource at SCO-API Returns ------- scoserv.ImageGroupHandle Handle for local copy of image group resource
[ "Get", "handle", "for", "image", "group", "resource", "at", "given", "Url", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L554-L576
251,376
heikomuller/sco-client
scocli/__init__.py
SCOClient.image_groups_list
def image_groups_list(self, api_url=None, offset=0, limit=-1, properties=None): """Get list of image group resources from a SCO-API. Parameters ---------- api_url : string, optional Base Url of the SCO-API. Uses default API if argument not present. offset : int, optional Starting offset for returned list items limit : int, optional Limit the number of items in the result properties : List(string) List of additional object properties to be included for items in the result Returns ------- List(scoserv.ResourceHandle) List of resource handles (one per image group in the listing) """ # Get subject listing Url for given SCO-API and return the retrieved # resource listing return sco.get_resource_listing( self.get_api_references(api_url)[sco.REF_IMAGE_GROUPS_LIST], offset, limit, properties )
python
def image_groups_list(self, api_url=None, offset=0, limit=-1, properties=None): """Get list of image group resources from a SCO-API. Parameters ---------- api_url : string, optional Base Url of the SCO-API. Uses default API if argument not present. offset : int, optional Starting offset for returned list items limit : int, optional Limit the number of items in the result properties : List(string) List of additional object properties to be included for items in the result Returns ------- List(scoserv.ResourceHandle) List of resource handles (one per image group in the listing) """ # Get subject listing Url for given SCO-API and return the retrieved # resource listing return sco.get_resource_listing( self.get_api_references(api_url)[sco.REF_IMAGE_GROUPS_LIST], offset, limit, properties )
[ "def", "image_groups_list", "(", "self", ",", "api_url", "=", "None", ",", "offset", "=", "0", ",", "limit", "=", "-", "1", ",", "properties", "=", "None", ")", ":", "# Get subject listing Url for given SCO-API and return the retrieved", "# resource listing", "return", "sco", ".", "get_resource_listing", "(", "self", ".", "get_api_references", "(", "api_url", ")", "[", "sco", ".", "REF_IMAGE_GROUPS_LIST", "]", ",", "offset", ",", "limit", ",", "properties", ")" ]
Get list of image group resources from a SCO-API. Parameters ---------- api_url : string, optional Base Url of the SCO-API. Uses default API if argument not present. offset : int, optional Starting offset for returned list items limit : int, optional Limit the number of items in the result properties : List(string) List of additional object properties to be included for items in the result Returns ------- List(scoserv.ResourceHandle) List of resource handles (one per image group in the listing)
[ "Get", "list", "of", "image", "group", "resources", "from", "a", "SCO", "-", "API", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L578-L605
251,377
heikomuller/sco-client
scocli/__init__.py
SCOClient.models_get
def models_get(self, resource_url): """Get handle for model resource at given Url. Parameters ---------- resource_url : string Url for model resource at SCO-API Returns ------- models.ModelHandle Handle for local copy of model resource """ # Get resource directory, Json representation, active flag, and cache id obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url) # Create model handle. model = ModelHandle(obj_json) # Add resource to cache if not exists if not cache_id in self.cache: self.cache_add(resource_url, cache_id) # Return subject handle return model
python
def models_get(self, resource_url): """Get handle for model resource at given Url. Parameters ---------- resource_url : string Url for model resource at SCO-API Returns ------- models.ModelHandle Handle for local copy of model resource """ # Get resource directory, Json representation, active flag, and cache id obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url) # Create model handle. model = ModelHandle(obj_json) # Add resource to cache if not exists if not cache_id in self.cache: self.cache_add(resource_url, cache_id) # Return subject handle return model
[ "def", "models_get", "(", "self", ",", "resource_url", ")", ":", "# Get resource directory, Json representation, active flag, and cache id", "obj_dir", ",", "obj_json", ",", "is_active", ",", "cache_id", "=", "self", ".", "get_object", "(", "resource_url", ")", "# Create model handle.", "model", "=", "ModelHandle", "(", "obj_json", ")", "# Add resource to cache if not exists", "if", "not", "cache_id", "in", "self", ".", "cache", ":", "self", ".", "cache_add", "(", "resource_url", ",", "cache_id", ")", "# Return subject handle", "return", "model" ]
Get handle for model resource at given Url. Parameters ---------- resource_url : string Url for model resource at SCO-API Returns ------- models.ModelHandle Handle for local copy of model resource
[ "Get", "handle", "for", "model", "resource", "at", "given", "Url", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L611-L632
251,378
heikomuller/sco-client
scocli/__init__.py
SCOClient.models_list
def models_list(self, api_url=None, offset=0, limit=-1, properties=None): """Get list of model resources from a SCO-API. Parameters ---------- api_url : string, optional Base Url of the SCO-API. Uses default API if argument not present. offset : int, optional Starting offset for returned list items limit : int, optional Limit the number of items in the result properties : List(string) List of additional object properties to be included for items in the result Returns ------- List(scoserv.ResourceHandle) List of resource handles (one per model in the listing) """ # Get subject listing Url for given SCO-API and return the retrieved # resource listing return sco.get_resource_listing( self.get_api_references(api_url)[sco.REF_MODELS_LIST], offset, limit, properties )
python
def models_list(self, api_url=None, offset=0, limit=-1, properties=None): """Get list of model resources from a SCO-API. Parameters ---------- api_url : string, optional Base Url of the SCO-API. Uses default API if argument not present. offset : int, optional Starting offset for returned list items limit : int, optional Limit the number of items in the result properties : List(string) List of additional object properties to be included for items in the result Returns ------- List(scoserv.ResourceHandle) List of resource handles (one per model in the listing) """ # Get subject listing Url for given SCO-API and return the retrieved # resource listing return sco.get_resource_listing( self.get_api_references(api_url)[sco.REF_MODELS_LIST], offset, limit, properties )
[ "def", "models_list", "(", "self", ",", "api_url", "=", "None", ",", "offset", "=", "0", ",", "limit", "=", "-", "1", ",", "properties", "=", "None", ")", ":", "# Get subject listing Url for given SCO-API and return the retrieved", "# resource listing", "return", "sco", ".", "get_resource_listing", "(", "self", ".", "get_api_references", "(", "api_url", ")", "[", "sco", ".", "REF_MODELS_LIST", "]", ",", "offset", ",", "limit", ",", "properties", ")" ]
Get list of model resources from a SCO-API. Parameters ---------- api_url : string, optional Base Url of the SCO-API. Uses default API if argument not present. offset : int, optional Starting offset for returned list items limit : int, optional Limit the number of items in the result properties : List(string) List of additional object properties to be included for items in the result Returns ------- List(scoserv.ResourceHandle) List of resource handles (one per model in the listing)
[ "Get", "list", "of", "model", "resources", "from", "a", "SCO", "-", "API", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L634-L661
251,379
heikomuller/sco-client
scocli/__init__.py
SCOClient.subjects_create
def subjects_create(self, filename, api_url=None, properties=None): """Create new anatomy subject at given SCO-API by uploading local file. Expects a tar-archive containing a FreeSurfer anatomy. Parameters ---------- filename : string Path to tar-archive on local disk api_url : string, optional Base Url of SCO-API where subject will be created properties : Dictionary, optional Set of additional properties for created subject Returns ------- scoserv.SubjectHandle Handle for local copy of created subject resource """ # Create image group and return handle for created resource return self.subjects_get( SubjectHandle.create( self.get_api_references(api_url)[sco.REF_SUBJECTS_CREATE], filename, properties ) )
python
def subjects_create(self, filename, api_url=None, properties=None): """Create new anatomy subject at given SCO-API by uploading local file. Expects a tar-archive containing a FreeSurfer anatomy. Parameters ---------- filename : string Path to tar-archive on local disk api_url : string, optional Base Url of SCO-API where subject will be created properties : Dictionary, optional Set of additional properties for created subject Returns ------- scoserv.SubjectHandle Handle for local copy of created subject resource """ # Create image group and return handle for created resource return self.subjects_get( SubjectHandle.create( self.get_api_references(api_url)[sco.REF_SUBJECTS_CREATE], filename, properties ) )
[ "def", "subjects_create", "(", "self", ",", "filename", ",", "api_url", "=", "None", ",", "properties", "=", "None", ")", ":", "# Create image group and return handle for created resource", "return", "self", ".", "subjects_get", "(", "SubjectHandle", ".", "create", "(", "self", ".", "get_api_references", "(", "api_url", ")", "[", "sco", ".", "REF_SUBJECTS_CREATE", "]", ",", "filename", ",", "properties", ")", ")" ]
Create new anatomy subject at given SCO-API by uploading local file. Expects a tar-archive containing a FreeSurfer anatomy. Parameters ---------- filename : string Path to tar-archive on local disk api_url : string, optional Base Url of SCO-API where subject will be created properties : Dictionary, optional Set of additional properties for created subject Returns ------- scoserv.SubjectHandle Handle for local copy of created subject resource
[ "Create", "new", "anatomy", "subject", "at", "given", "SCO", "-", "API", "by", "uploading", "local", "file", ".", "Expects", "a", "tar", "-", "archive", "containing", "a", "FreeSurfer", "anatomy", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L667-L692
251,380
heikomuller/sco-client
scocli/__init__.py
SCOClient.subjects_get
def subjects_get(self, resource_url): """Get handle for subject resource at given Url. Parameters ---------- resource_url : string Url for subject resource at SCO-API Returns ------- scoserv.SubjectHandle Handle for local copy of subject resource """ # Get resource directory, Json representation, active flag, and cache id obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url) # Create subject handle. Will raise an exception if resource is not # in cache and cannot be downloaded. subject = SubjectHandle(obj_json, obj_dir) # Add resource to cache if not exists if not cache_id in self.cache: self.cache_add(resource_url, cache_id) # Return subject handle return subject
python
def subjects_get(self, resource_url): """Get handle for subject resource at given Url. Parameters ---------- resource_url : string Url for subject resource at SCO-API Returns ------- scoserv.SubjectHandle Handle for local copy of subject resource """ # Get resource directory, Json representation, active flag, and cache id obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url) # Create subject handle. Will raise an exception if resource is not # in cache and cannot be downloaded. subject = SubjectHandle(obj_json, obj_dir) # Add resource to cache if not exists if not cache_id in self.cache: self.cache_add(resource_url, cache_id) # Return subject handle return subject
[ "def", "subjects_get", "(", "self", ",", "resource_url", ")", ":", "# Get resource directory, Json representation, active flag, and cache id", "obj_dir", ",", "obj_json", ",", "is_active", ",", "cache_id", "=", "self", ".", "get_object", "(", "resource_url", ")", "# Create subject handle. Will raise an exception if resource is not", "# in cache and cannot be downloaded.", "subject", "=", "SubjectHandle", "(", "obj_json", ",", "obj_dir", ")", "# Add resource to cache if not exists", "if", "not", "cache_id", "in", "self", ".", "cache", ":", "self", ".", "cache_add", "(", "resource_url", ",", "cache_id", ")", "# Return subject handle", "return", "subject" ]
Get handle for subject resource at given Url. Parameters ---------- resource_url : string Url for subject resource at SCO-API Returns ------- scoserv.SubjectHandle Handle for local copy of subject resource
[ "Get", "handle", "for", "subject", "resource", "at", "given", "Url", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L694-L716
251,381
heikomuller/sco-client
scocli/__init__.py
SCOClient.subjects_list
def subjects_list(self, api_url=None, offset=0, limit=-1, properties=None): """Get list of subject resources from a SCO-API. Parameters ---------- api_url : string, optional Base Url of the SCO-API. Uses default API if argument not present. offset : int, optional Starting offset for returned list items limit : int, optional Limit the number of items in the result properties : List(string) List of additional object properties to be included for items in the result Returns ------- List(scoserv.ResourceHandle) List of resource handles (one per subject in the listing) """ # Get subject listing Url for given SCO-API and return the retrieved # resource listing return sco.get_resource_listing( self.get_api_references(api_url)[sco.REF_SUBJECTS_LIST], offset, limit, properties )
python
def subjects_list(self, api_url=None, offset=0, limit=-1, properties=None): """Get list of subject resources from a SCO-API. Parameters ---------- api_url : string, optional Base Url of the SCO-API. Uses default API if argument not present. offset : int, optional Starting offset for returned list items limit : int, optional Limit the number of items in the result properties : List(string) List of additional object properties to be included for items in the result Returns ------- List(scoserv.ResourceHandle) List of resource handles (one per subject in the listing) """ # Get subject listing Url for given SCO-API and return the retrieved # resource listing return sco.get_resource_listing( self.get_api_references(api_url)[sco.REF_SUBJECTS_LIST], offset, limit, properties )
[ "def", "subjects_list", "(", "self", ",", "api_url", "=", "None", ",", "offset", "=", "0", ",", "limit", "=", "-", "1", ",", "properties", "=", "None", ")", ":", "# Get subject listing Url for given SCO-API and return the retrieved", "# resource listing", "return", "sco", ".", "get_resource_listing", "(", "self", ".", "get_api_references", "(", "api_url", ")", "[", "sco", ".", "REF_SUBJECTS_LIST", "]", ",", "offset", ",", "limit", ",", "properties", ")" ]
Get list of subject resources from a SCO-API. Parameters ---------- api_url : string, optional Base Url of the SCO-API. Uses default API if argument not present. offset : int, optional Starting offset for returned list items limit : int, optional Limit the number of items in the result properties : List(string) List of additional object properties to be included for items in the result Returns ------- List(scoserv.ResourceHandle) List of resource handles (one per subject in the listing)
[ "Get", "list", "of", "subject", "resources", "from", "a", "SCO", "-", "API", "." ]
c4afab71297f73003379bba4c1679be9dcf7cef8
https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/__init__.py#L718-L745
251,382
mattbierner/blotre-py
blotre.py
create_disposable
def create_disposable(clientInfo, config = {}): """ Create a new disposable client. """ response = requests.put( _format_url( _extend(DEFAULT_CONFIG, config), OAUTH2_ROOT + 'disposable'), json = clientInfo) if response.status_code != 200: return None else: body = response.json() return Blotre({ 'client_id': body['id'], 'client_secret': body['secret'], 'code': body['code'] }, config = config)
python
def create_disposable(clientInfo, config = {}): """ Create a new disposable client. """ response = requests.put( _format_url( _extend(DEFAULT_CONFIG, config), OAUTH2_ROOT + 'disposable'), json = clientInfo) if response.status_code != 200: return None else: body = response.json() return Blotre({ 'client_id': body['id'], 'client_secret': body['secret'], 'code': body['code'] }, config = config)
[ "def", "create_disposable", "(", "clientInfo", ",", "config", "=", "{", "}", ")", ":", "response", "=", "requests", ".", "put", "(", "_format_url", "(", "_extend", "(", "DEFAULT_CONFIG", ",", "config", ")", ",", "OAUTH2_ROOT", "+", "'disposable'", ")", ",", "json", "=", "clientInfo", ")", "if", "response", ".", "status_code", "!=", "200", ":", "return", "None", "else", ":", "body", "=", "response", ".", "json", "(", ")", "return", "Blotre", "(", "{", "'client_id'", ":", "body", "[", "'id'", "]", ",", "'client_secret'", ":", "body", "[", "'secret'", "]", ",", "'code'", ":", "body", "[", "'code'", "]", "}", ",", "config", "=", "config", ")" ]
Create a new disposable client.
[ "Create", "a", "new", "disposable", "client", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L328-L346
251,383
mattbierner/blotre-py
blotre.py
_get_existing_disposable_app
def _get_existing_disposable_app(file, clientInfo, conf): """ Attempt to load an existing disposable client from the given file. Returns None if no valid client data is found. """ if not os.path.isfile(file): return None else: data = None with open(file, 'r') as f: data = json.load(f) if not 'client' in data or not 'creds' in data: return None return _BlotreDisposableApp(file, data['client'], creds = data['creds'], config = conf)
python
def _get_existing_disposable_app(file, clientInfo, conf): """ Attempt to load an existing disposable client from the given file. Returns None if no valid client data is found. """ if not os.path.isfile(file): return None else: data = None with open(file, 'r') as f: data = json.load(f) if not 'client' in data or not 'creds' in data: return None return _BlotreDisposableApp(file, data['client'], creds = data['creds'], config = conf)
[ "def", "_get_existing_disposable_app", "(", "file", ",", "clientInfo", ",", "conf", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "file", ")", ":", "return", "None", "else", ":", "data", "=", "None", "with", "open", "(", "file", ",", "'r'", ")", "as", "f", ":", "data", "=", "json", ".", "load", "(", "f", ")", "if", "not", "'client'", "in", "data", "or", "not", "'creds'", "in", "data", ":", "return", "None", "return", "_BlotreDisposableApp", "(", "file", ",", "data", "[", "'client'", "]", ",", "creds", "=", "data", "[", "'creds'", "]", ",", "config", "=", "conf", ")" ]
Attempt to load an existing disposable client from the given file. Returns None if no valid client data is found.
[ "Attempt", "to", "load", "an", "existing", "disposable", "client", "from", "the", "given", "file", ".", "Returns", "None", "if", "no", "valid", "client", "data", "is", "found", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L373-L388
251,384
mattbierner/blotre-py
blotre.py
_try_redeem_disposable_app
def _try_redeem_disposable_app(file, client): """ Attempt to redeem a one time code registered on the client. """ redeemedClient = client.redeem_onetime_code(None) if redeemedClient is None: return None else: return _BlotreDisposableApp(file, redeemedClient.client, creds = redeemedClient.creds, config = redeemedClient.config)
python
def _try_redeem_disposable_app(file, client): """ Attempt to redeem a one time code registered on the client. """ redeemedClient = client.redeem_onetime_code(None) if redeemedClient is None: return None else: return _BlotreDisposableApp(file, redeemedClient.client, creds = redeemedClient.creds, config = redeemedClient.config)
[ "def", "_try_redeem_disposable_app", "(", "file", ",", "client", ")", ":", "redeemedClient", "=", "client", ".", "redeem_onetime_code", "(", "None", ")", "if", "redeemedClient", "is", "None", ":", "return", "None", "else", ":", "return", "_BlotreDisposableApp", "(", "file", ",", "redeemedClient", ".", "client", ",", "creds", "=", "redeemedClient", ".", "creds", ",", "config", "=", "redeemedClient", ".", "config", ")" ]
Attempt to redeem a one time code registered on the client.
[ "Attempt", "to", "redeem", "a", "one", "time", "code", "registered", "on", "the", "client", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L390-L401
251,385
mattbierner/blotre-py
blotre.py
_check_app_is_valid
def _check_app_is_valid(client): """ Check to see if the app has valid creds. """ try: if 'refresh_token' in client.creds: client.exchange_refresh_token() else: client.get_token_info() return True except TokenEndpointError as e: return False
python
def _check_app_is_valid(client): """ Check to see if the app has valid creds. """ try: if 'refresh_token' in client.creds: client.exchange_refresh_token() else: client.get_token_info() return True except TokenEndpointError as e: return False
[ "def", "_check_app_is_valid", "(", "client", ")", ":", "try", ":", "if", "'refresh_token'", "in", "client", ".", "creds", ":", "client", ".", "exchange_refresh_token", "(", ")", "else", ":", "client", ".", "get_token_info", "(", ")", "return", "True", "except", "TokenEndpointError", "as", "e", ":", "return", "False" ]
Check to see if the app has valid creds.
[ "Check", "to", "see", "if", "the", "app", "has", "valid", "creds", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L411-L422
251,386
mattbierner/blotre-py
blotre.py
create_disposable_app
def create_disposable_app(clientInfo, config={}): """ Use an existing disposable app if data exists or create a new one and persist the data. """ file = _get_disposable_app_filename(clientInfo) existing = _get_existing_disposable_app(file, clientInfo, config) if existing: if _check_app_is_valid(existing): return existing else: print("Existing client has expired, must recreate.") return _create_new_disposable_app(file, clientInfo, config)
python
def create_disposable_app(clientInfo, config={}): """ Use an existing disposable app if data exists or create a new one and persist the data. """ file = _get_disposable_app_filename(clientInfo) existing = _get_existing_disposable_app(file, clientInfo, config) if existing: if _check_app_is_valid(existing): return existing else: print("Existing client has expired, must recreate.") return _create_new_disposable_app(file, clientInfo, config)
[ "def", "create_disposable_app", "(", "clientInfo", ",", "config", "=", "{", "}", ")", ":", "file", "=", "_get_disposable_app_filename", "(", "clientInfo", ")", "existing", "=", "_get_existing_disposable_app", "(", "file", ",", "clientInfo", ",", "config", ")", "if", "existing", ":", "if", "_check_app_is_valid", "(", "existing", ")", ":", "return", "existing", "else", ":", "print", "(", "\"Existing client has expired, must recreate.\"", ")", "return", "_create_new_disposable_app", "(", "file", ",", "clientInfo", ",", "config", ")" ]
Use an existing disposable app if data exists or create a new one and persist the data.
[ "Use", "an", "existing", "disposable", "app", "if", "data", "exists", "or", "create", "a", "new", "one", "and", "persist", "the", "data", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L424-L437
251,387
mattbierner/blotre-py
blotre.py
Blotre.set_creds
def set_creds(self, newCreds): """Manually update the current creds.""" self.creds = newCreds self.on_creds_changed(newCreds) return self
python
def set_creds(self, newCreds): """Manually update the current creds.""" self.creds = newCreds self.on_creds_changed(newCreds) return self
[ "def", "set_creds", "(", "self", ",", "newCreds", ")", ":", "self", ".", "creds", "=", "newCreds", "self", ".", "on_creds_changed", "(", "newCreds", ")", "return", "self" ]
Manually update the current creds.
[ "Manually", "update", "the", "current", "creds", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L86-L90
251,388
mattbierner/blotre-py
blotre.py
Blotre.normalize_uri
def normalize_uri(self, uri): """Convert a stream path into its normalized form.""" return urllib.quote( re.sub(r"\s", '+', uri.strip().lower()), safe = '~@#$&()*!+=:),.?/\'')
python
def normalize_uri(self, uri): """Convert a stream path into its normalized form.""" return urllib.quote( re.sub(r"\s", '+', uri.strip().lower()), safe = '~@#$&()*!+=:),.?/\'')
[ "def", "normalize_uri", "(", "self", ",", "uri", ")", ":", "return", "urllib", ".", "quote", "(", "re", ".", "sub", "(", "r\"\\s\"", ",", "'+'", ",", "uri", ".", "strip", "(", ")", ".", "lower", "(", ")", ")", ",", "safe", "=", "'~@#$&()*!+=:),.?/\\''", ")" ]
Convert a stream path into its normalized form.
[ "Convert", "a", "stream", "path", "into", "its", "normalized", "form", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L98-L102
251,389
mattbierner/blotre-py
blotre.py
Blotre.get_authorization_url
def get_authorization_url(self): """Get the authorization Url for the current client.""" return self._format_url( OAUTH2_ROOT + 'authorize', query = { 'response_type': 'code', 'client_id': self.client.get('client_id', ''), 'redirect_uri': self.client.get('redirect_uri', '') })
python
def get_authorization_url(self): """Get the authorization Url for the current client.""" return self._format_url( OAUTH2_ROOT + 'authorize', query = { 'response_type': 'code', 'client_id': self.client.get('client_id', ''), 'redirect_uri': self.client.get('redirect_uri', '') })
[ "def", "get_authorization_url", "(", "self", ")", ":", "return", "self", ".", "_format_url", "(", "OAUTH2_ROOT", "+", "'authorize'", ",", "query", "=", "{", "'response_type'", ":", "'code'", ",", "'client_id'", ":", "self", ".", "client", ".", "get", "(", "'client_id'", ",", "''", ")", ",", "'redirect_uri'", ":", "self", ".", "client", ".", "get", "(", "'redirect_uri'", ",", "''", ")", "}", ")" ]
Get the authorization Url for the current client.
[ "Get", "the", "authorization", "Url", "for", "the", "current", "client", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L121-L129
251,390
mattbierner/blotre-py
blotre.py
Blotre._access_token_endpoint
def _access_token_endpoint(self, grantType, extraParams={}): """ Base exchange of data for an access_token. """ response = requests.post( self._format_url(OAUTH2_ROOT + 'access_token'), data = _extend({ 'grant_type': grantType, 'client_id': self.client.get('client_id', ''), 'client_secret': self.client.get('client_secret', ''), 'redirect_uri': self.client.get('redirect_uri', '') }, extraParams)) data = response.json() if 'error' in data or 'error_description' in data: raise _token_error_from_data(data) else: return self.set_creds(data)
python
def _access_token_endpoint(self, grantType, extraParams={}): """ Base exchange of data for an access_token. """ response = requests.post( self._format_url(OAUTH2_ROOT + 'access_token'), data = _extend({ 'grant_type': grantType, 'client_id': self.client.get('client_id', ''), 'client_secret': self.client.get('client_secret', ''), 'redirect_uri': self.client.get('redirect_uri', '') }, extraParams)) data = response.json() if 'error' in data or 'error_description' in data: raise _token_error_from_data(data) else: return self.set_creds(data)
[ "def", "_access_token_endpoint", "(", "self", ",", "grantType", ",", "extraParams", "=", "{", "}", ")", ":", "response", "=", "requests", ".", "post", "(", "self", ".", "_format_url", "(", "OAUTH2_ROOT", "+", "'access_token'", ")", ",", "data", "=", "_extend", "(", "{", "'grant_type'", ":", "grantType", ",", "'client_id'", ":", "self", ".", "client", ".", "get", "(", "'client_id'", ",", "''", ")", ",", "'client_secret'", ":", "self", ".", "client", ".", "get", "(", "'client_secret'", ",", "''", ")", ",", "'redirect_uri'", ":", "self", ".", "client", ".", "get", "(", "'redirect_uri'", ",", "''", ")", "}", ",", "extraParams", ")", ")", "data", "=", "response", ".", "json", "(", ")", "if", "'error'", "in", "data", "or", "'error_description'", "in", "data", ":", "raise", "_token_error_from_data", "(", "data", ")", "else", ":", "return", "self", ".", "set_creds", "(", "data", ")" ]
Base exchange of data for an access_token.
[ "Base", "exchange", "of", "data", "for", "an", "access_token", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L138-L155
251,391
mattbierner/blotre-py
blotre.py
Blotre.get_token_info
def get_token_info(self): """ Get information about the current access token. """ response = requests.get( self._format_url(OAUTH2_ROOT + 'token_info', { 'token': self.creds['access_token'] })) data = response.json() if response.status_code != 200: raise _token_error_from_data(data) else: return data
python
def get_token_info(self): """ Get information about the current access token. """ response = requests.get( self._format_url(OAUTH2_ROOT + 'token_info', { 'token': self.creds['access_token'] })) data = response.json() if response.status_code != 200: raise _token_error_from_data(data) else: return data
[ "def", "get_token_info", "(", "self", ")", ":", "response", "=", "requests", ".", "get", "(", "self", ".", "_format_url", "(", "OAUTH2_ROOT", "+", "'token_info'", ",", "{", "'token'", ":", "self", ".", "creds", "[", "'access_token'", "]", "}", ")", ")", "data", "=", "response", ".", "json", "(", ")", "if", "response", ".", "status_code", "!=", "200", ":", "raise", "_token_error_from_data", "(", "data", ")", "else", ":", "return", "data" ]
Get information about the current access token.
[ "Get", "information", "about", "the", "current", "access", "token", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L185-L197
251,392
mattbierner/blotre-py
blotre.py
Blotre._add_auth_headers
def _add_auth_headers(self, base): """Attach the access_token to a request.""" if 'access_token' in self.creds: return _extend(base, { 'authorization': 'Bearer ' + self.creds['access_token'] }) return base
python
def _add_auth_headers(self, base): """Attach the access_token to a request.""" if 'access_token' in self.creds: return _extend(base, { 'authorization': 'Bearer ' + self.creds['access_token'] }) return base
[ "def", "_add_auth_headers", "(", "self", ",", "base", ")", ":", "if", "'access_token'", "in", "self", ".", "creds", ":", "return", "_extend", "(", "base", ",", "{", "'authorization'", ":", "'Bearer '", "+", "self", ".", "creds", "[", "'access_token'", "]", "}", ")", "return", "base" ]
Attach the acces_token to a request.
[ "Attach", "the", "acces_token", "to", "a", "request", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L200-L206
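The helper above produces a standard Bearer authorization header. A self-contained sketch of the same merge, with a placeholder token and base headers:

creds = {'access_token': 'abc123'}            # placeholder token
base = {'content-type': 'application/json'}   # placeholder base headers
headers = dict(base)
if 'access_token' in creds:
    headers['authorization'] = 'Bearer ' + creds['access_token']
# headers -> {'content-type': 'application/json', 'authorization': 'Bearer abc123'}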
251,393
mattbierner/blotre-py
blotre.py
Blotre._is_expired_response
def _is_expired_response(self, response):
    """
    Check if the response failed because of an expired access token.
    """
    if response.status_code != 401:
        return False
    challenge = response.headers.get('www-authenticate', '')
    return 'error="invalid_token"' in challenge
python
def _is_expired_response(self, response):
    """
    Check if the response failed because of an expired access token.
    """
    if response.status_code != 401:
        return False
    challenge = response.headers.get('www-authenticate', '')
    return 'error="invalid_token"' in challenge
[ "def", "_is_expired_response", "(", "self", ",", "response", ")", ":", "if", "response", ".", "status_code", "!=", "401", ":", "return", "False", "challenge", "=", "response", ".", "headers", ".", "get", "(", "'www-authenticate'", ",", "''", ")", "return", "'error=\"invalid_token\"'", "in", "challenge" ]
Check if the response failed because of an expired access token.
[ "Check", "if", "the", "response", "failed", "because", "of", "an", "expired", "access", "token", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L208-L215
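The check above looks for an RFC 6750 style Bearer challenge on a 401 response. An illustrative challenge value that would be treated as an expired token (the exact wording sent by the server may differ):

# Hypothetical WWW-Authenticate header value on a 401 response.
challenge = 'Bearer realm="Blot\'re", error="invalid_token", error_description="The access token expired"'
'error="invalid_token"' in challenge   # True -> treated as an expired access token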
251,394
mattbierner/blotre-py
blotre.py
Blotre._make_request
def _make_request(self, type, path, args, noRetry=False):
    """
    Make a request to Blot're. Attempts to reply the request
    if it fails due to an expired access token.
    """
    response = getattr(requests, type)(path,
        headers = self._add_auth_headers(_JSON_HEADERS),
        **args)
    if response.status_code == 200 or response.status_code == 201:
        return response.json()
    elif not noRetry and self._is_expired_response(response) \
            and 'refresh_token' in self.creds:
        try:
            self.exchange_refresh_token()
        except TokenEndpointError:
            raise _rest_error_from_response(response)
        return self._make_request(type, path, args, noRetry = True)
    raise _rest_error_from_response(response)
python
def _make_request(self, type, path, args, noRetry=False):
    """
    Make a request to Blot're. Attempts to reply the request
    if it fails due to an expired access token.
    """
    response = getattr(requests, type)(path,
        headers = self._add_auth_headers(_JSON_HEADERS),
        **args)
    if response.status_code == 200 or response.status_code == 201:
        return response.json()
    elif not noRetry and self._is_expired_response(response) \
            and 'refresh_token' in self.creds:
        try:
            self.exchange_refresh_token()
        except TokenEndpointError:
            raise _rest_error_from_response(response)
        return self._make_request(type, path, args, noRetry = True)
    raise _rest_error_from_response(response)
[ "def", "_make_request", "(", "self", ",", "type", ",", "path", ",", "args", ",", "noRetry", "=", "False", ")", ":", "response", "=", "getattr", "(", "requests", ",", "type", ")", "(", "path", ",", "headers", "=", "self", ".", "_add_auth_headers", "(", "_JSON_HEADERS", ")", ",", "*", "*", "args", ")", "if", "response", ".", "status_code", "==", "200", "or", "response", ".", "status_code", "==", "201", ":", "return", "response", ".", "json", "(", ")", "elif", "not", "noRetry", "and", "self", ".", "_is_expired_response", "(", "response", ")", "and", "'refresh_token'", "in", "self", ".", "creds", ":", "try", ":", "self", ".", "exchange_refresh_token", "(", ")", "except", "TokenEndpointError", ":", "raise", "_rest_error_from_response", "(", "response", ")", "return", "self", ".", "_make_request", "(", "type", ",", "path", ",", "args", ",", "noRetry", "=", "True", ")", "raise", "_rest_error_from_response", "(", "response", ")" ]
Make a request to Blot're. Attempts to reply the request if it fails due to an expired access token.
[ "Make", "a", "request", "to", "Blot", "re", ".", "Attempts", "to", "reply", "the", "request", "if", "it", "fails", "due", "to", "an", "expired", "access", "token", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L217-L234
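The retry logic above is a common "refresh once, then replay" pattern. A generic, library-agnostic sketch of the same idea; `send` and `refresh` are hypothetical callables used only to illustrate the pattern, not part of the blotre-py API.

def request_with_refresh(send, refresh, can_refresh=True):
    """Replay a request once after refreshing an expired access token.

    `send` performs the HTTP call and returns a response object;
    `refresh` exchanges the refresh token. Both are hypothetical.
    """
    response = send()
    if response.status_code in (200, 201):
        return response.json()
    if can_refresh and response.status_code == 401:
        refresh()  # may itself raise if the refresh token is also invalid
        return request_with_refresh(send, refresh, can_refresh=False)
    raise RuntimeError('request failed: %d' % response.status_code)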
251,395
mattbierner/blotre-py
blotre.py
Blotre.put
def put(self, path, body):
    """PUT request."""
    return self._make_request('put',
        self._format_url(API_ROOT + path), {
            'json': body
        })
python
def put(self, path, body):
    """PUT request."""
    return self._make_request('put',
        self._format_url(API_ROOT + path), {
            'json': body
        })
[ "def", "put", "(", "self", ",", "path", ",", "body", ")", ":", "return", "self", ".", "_make_request", "(", "'put'", ",", "self", ".", "_format_url", "(", "API_ROOT", "+", "path", ")", ",", "{", "'json'", ":", "body", "}", ")" ]
PUT request.
[ "PUT", "request", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L248-L253
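A usage sketch for put() above. The resource path and body are illustrative placeholders rather than documented Blot're endpoints, and `client` is again an assumed, already-authorized Blotre instance.

# Hypothetical path and payload -- consult the Blot're REST docs for real resources.
stream_id = 'some-stream-id'
updated = client.put('stream/' + stream_id, {'status': {'color': '#00aaff'}})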
251,396
mattbierner/blotre-py
blotre.py
Blotre.get_child
def get_child(self, streamId, childId, options={}):
    """Get the child of a stream."""
    return self.get('stream/' + streamId + '/children/' + childId, options)
python
def get_child(self, streamId, childId, options={}):
    """Get the child of a stream."""
    return self.get('stream/' + streamId + '/children/' + childId, options)
[ "def", "get_child", "(", "self", ",", "streamId", ",", "childId", ",", "options", "=", "{", "}", ")", ":", "return", "self", ".", "get", "(", "'stream/'", "+", "streamId", "+", "'/children/'", "+", "childId", ",", "options", ")" ]
Get the child of a stream.
[ "Get", "the", "child", "of", "a", "stream", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L294-L296
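Usage sketch for get_child() above; both identifiers are placeholders and `client` is an assumed authorized Blotre instance.

# Placeholder stream ids.
child = client.get_child('parent-stream-id', 'child-stream-id')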
251,397
mattbierner/blotre-py
blotre.py
_BlotreDisposableApp._persist
def _persist(self):
    """Persist client data."""
    with open(self.file, 'w') as f:
        json.dump({
            'client': self.client,
            'creds': self.creds,
            'config': self.config
        }, f)
python
def _persist(self):
    """Persist client data."""
    with open(self.file, 'w') as f:
        json.dump({
            'client': self.client,
            'creds': self.creds,
            'config': self.config
        }, f)
[ "def", "_persist", "(", "self", ")", ":", "with", "open", "(", "self", ".", "file", ",", "'w'", ")", "as", "f", ":", "json", ".", "dump", "(", "{", "'client'", ":", "self", ".", "client", ",", "'creds'", ":", "self", ".", "creds", ",", "'config'", ":", "self", ".", "config", "}", ",", "f", ")" ]
Persist client data.
[ "Persist", "client", "data", "." ]
c98228d1159bc651aad546e442b0acbf97b1e043
https://github.com/mattbierner/blotre-py/blob/c98228d1159bc651aad546e442b0acbf97b1e043/blotre.py#L358-L365
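The persisted file is plain JSON, so restoring the saved state is the inverse read. A sketch, with a placeholder file name standing in for whatever self.file was set to:

import json

with open('blotre-client.json') as f:   # placeholder file name
    saved = json.load(f)
# saved['client'], saved['creds'] and saved['config'] mirror the attributes written above.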
251,398
etscrivner/nose-perfdump
perfdump/plugin.py
PerfDumpPlugin.options
def options(self, parser, env=os.environ):
    """Handle parsing additional command-line options"""
    super(PerfDumpPlugin, self).options(parser, env=env)
    parser.add_option("", "--perfdump-html", dest="perfdump_html_file",
                      help="Set destination for HTML report output")
python
def options(self, parser, env=os.environ):
    """Handle parsing additional command-line options"""
    super(PerfDumpPlugin, self).options(parser, env=env)
    parser.add_option("", "--perfdump-html", dest="perfdump_html_file",
                      help="Set destination for HTML report output")
[ "def", "options", "(", "self", ",", "parser", ",", "env", "=", "os", ".", "environ", ")", ":", "super", "(", "PerfDumpPlugin", ",", "self", ")", ".", "options", "(", "parser", ",", "env", "=", "env", ")", "parser", ".", "add_option", "(", "\"\"", ",", "\"--perfdump-html\"", ",", "dest", "=", "\"perfdump_html_file\"", ",", "help", "=", "\"Set destination for HTML report output\"", ")" ]
Handle parsing additional command-line options
[ "Handle", "parsing", "additional", "command", "-", "line", "options" ]
a203a68495d30346fab43fb903cb60cd29b17d49
https://github.com/etscrivner/nose-perfdump/blob/a203a68495d30346fab43fb903cb60cd29b17d49/perfdump/plugin.py#L87-L91
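One way to exercise the option registered above is to drive nose programmatically and pass the flag on the constructed argv. The import path is inferred from this entry's file layout, and whether the plugin also needs nose's usual --with-<name> enable flag depends on plugin attributes not shown in this excerpt.

import nose
from perfdump.plugin import PerfDumpPlugin  # import path inferred from perfdump/plugin.py

nose.run(
    argv=['nosetests', '--perfdump-html=perf-report.html'],
    addplugins=[PerfDumpPlugin()],
)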
251,399
etscrivner/nose-perfdump
perfdump/plugin.py
PerfDumpPlugin.configure
def configure(self, options, conf):
    """Configure this plugin using the given options"""
    super(PerfDumpPlugin, self).configure(options, conf)
    if not self.enabled:
        return
    try:
        self.html_output_file = options.perfdump_html_file
    except:
        pass
    self.db = SqliteConnection.get(self.database_name)
python
def configure(self, options, conf):
    """Configure this plugin using the given options"""
    super(PerfDumpPlugin, self).configure(options, conf)
    if not self.enabled:
        return
    try:
        self.html_output_file = options.perfdump_html_file
    except:
        pass
    self.db = SqliteConnection.get(self.database_name)
[ "def", "configure", "(", "self", ",", "options", ",", "conf", ")", ":", "super", "(", "PerfDumpPlugin", ",", "self", ")", ".", "configure", "(", "options", ",", "conf", ")", "if", "not", "self", ".", "enabled", ":", "return", "try", ":", "self", ".", "html_output_file", "=", "options", ".", "perfdump_html_file", "except", ":", "pass", "self", ".", "db", "=", "SqliteConnection", ".", "get", "(", "self", ".", "database_name", ")" ]
Configure this plugin using the given options
[ "Configure", "this", "plugin", "using", "the", "given", "options" ]
a203a68495d30346fab43fb903cb60cd29b17d49
https://github.com/etscrivner/nose-perfdump/blob/a203a68495d30346fab43fb903cb60cd29b17d49/perfdump/plugin.py#L93-L102
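The bare try/except in configure() above only tolerates a missing perfdump_html_file attribute on the options object. A roughly equivalent, more explicit sketch of that guard (the None fallback is an assumption about the intended default):

# Roughly equivalent to the try/except above: fall back to None when the option is absent.
html_output_file = getattr(options, 'perfdump_html_file', None)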