signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def _escape_str ( value ) : """Escape a value into a TS3 compatible string @ param value : Value @ type value : string / int"""
if isinstance ( value , int ) : return "%d" % value value = value . replace ( "\\" , r'\\' ) for i , j in ts3_escape . items ( ) : value = value . replace ( i , j ) return value
def _ScheduleVariableHunt ( hunt_obj ) : """Schedules flows for a variable hunt ."""
if hunt_obj . client_rate != 0 : raise VariableHuntCanNotHaveClientRateError ( hunt_obj . hunt_id , hunt_obj . client_rate ) seen_clients = set ( ) for flow_group in hunt_obj . args . variable . flow_groups : for client_id in flow_group . client_ids : if client_id in seen_clients : raise Can...
def to_html ( self , classes = None , notebook = False ) : """Render a DataFrame to a html table . Parameters notebook : { True , False } , optional , default False Whether the generated HTML is for IPython Notebook ."""
html_renderer = HTMLFormatter ( self , classes = classes , max_rows = self . max_rows , max_cols = self . max_cols , notebook = notebook ) if hasattr ( self . buf , 'write' ) : html_renderer . write_result ( self . buf ) elif isinstance ( self . buf , six . string_types ) : with open ( self . buf , 'w' ) as f :...
def clearData(self, type: str = '') -> None:
    """Remove stored data of the given type (format).

    If the ``type`` argument is omitted (or normalizes to an empty
    string), all stored data is removed.
    """
    normalized = normalize_type(type)
    if not normalized:
        self.__data.clear()
        return
    if normalized in self.__data:
        del self.__data[normalized]
def _find_scalac_plugins ( self , scalac_plugins , classpath ) : """Returns a map from plugin name to list of plugin classpath entries . The first entry in each list is the classpath entry containing the plugin metadata . The rest are the internal transitive deps of the plugin . This allows us to have in - re...
# Allow multiple flags and also comma - separated values in a single flag . plugin_names = { p for val in scalac_plugins for p in val . split ( ',' ) } if not plugin_names : return { } active_plugins = { } buildroot = get_buildroot ( ) cp_product = self . context . products . get_data ( 'runtime_classpath' ) for cl...
def split_key(key):
    """Split a node key into a tuple of path chunks."""
    if key == KEY_SEP:
        return ()
    chunks = tuple(key.strip(KEY_SEP).split(KEY_SEP))
    head = chunks[0]
    if not head.startswith(KEY_SEP):
        return chunks
    # Drop the residual leading separator from the first chunk.
    return (head[len(KEY_SEP):],) + chunks[1:]
def add_curves_from_las(self, fname, remap=None, funcs=None):
    """Given a LAS file (or a list of LAS files), add its curves to the
    current well instance.  Essentially just wraps ``add_curves_from_lasio()``.

    Args:
        fname (str): The path of the LAS file to read curves from.
        remap (dict): Optional; passed through to ``add_curves_from_lasio()``.
        funcs (dict): Optional; passed through to ``add_curves_from_lasio()``.

    Returns:
        None.
    """
    try:
        # First assume fname is the path of a single file.
        self.add_curves_from_lasio(lasio.read(fname), remap=remap, funcs=funcs)
    except:  # noqa: E722 -- deliberately broad: fall back to treating fname as a list
        for single_path in fname:
            self.add_curves_from_lasio(lasio.read(single_path),
                                       remap=remap, funcs=funcs)
    return None
async def reset_user_password(self, username):
    """Reset user password.

    :param str username: Username
    :returns: A :class:`~juju.user.User` instance
    """
    facade = client.UserManagerFacade.from_connection(self.connection())
    target = client.Entity(tag.user(username))
    results = await facade.ResetPassword([target])
    new_secret = results.results[0].secret_key
    return await self.get_user(username, secret_key=new_secret)
def remove_by_example(cls, collection, example_data, wait_for_sync=None, limit=None):
    """Remove all documents in ``collection`` that match ``example_data``.

    Note: the ``limit`` attribute is not supported on sharded collections;
    using it there results in an error.
    """
    options = {
        'waitForSync': wait_for_sync,
        'limit': limit,
    }
    return cls._construct_query(
        name='remove-by-example',
        collection=collection,
        example=example_data,
        result=False,
        options=options,
    )
def get_environment(id=None, name=None):
    """Get a specific Environment by name or ID.

    Returns the formatted environment, or None when nothing was found.
    """
    raw = get_environment_raw(id, name)
    if not raw:
        return None
    return utils.format_json(raw)
def get_scan_log_lines(self, source_id, scan_id):
    """Get the log text for a Scan.

    :rtype: Iterator over log lines.
    """
    manager = self.client.get_manager(Scan)
    return manager.get_log_lines(source_id=source_id, scan_id=scan_id)
def compute_file_metrics ( processors , language , key , token_list ) : """use processors to compute file metrics ."""
# multiply iterator tli = itertools . tee ( token_list , len ( processors ) ) metrics = OrderedDict ( ) # reset all processors for p in processors : p . reset ( ) # process all tokens for p , tl in zip ( processors , tli ) : p . process_file ( language , key , tl ) # collect metrics from all processors for p in...
def find_field_by_name(browser, field_type, name):
    """Locate the control input with the given ``name``.

    :param browser: ``world.browser``
    :param string field_type: a field type (i.e. `button`)
    :param string name: ``name`` attribute

    Returns: an :class:`ElementSelector` filtered to displayed elements.
    """
    xpath = field_xpath(field_type, 'name') % string_literal(name)
    return ElementSelector(browser, xpath, filter_displayed=True)
def convert_type ( self , type ) : """Convert type to BigQuery"""
# Mapping mapping = { 'any' : 'STRING' , 'array' : None , 'boolean' : 'BOOLEAN' , 'date' : 'DATE' , 'datetime' : 'DATETIME' , 'duration' : None , 'geojson' : None , 'geopoint' : None , 'integer' : 'INTEGER' , 'number' : 'FLOAT' , 'object' : None , 'string' : 'STRING' , 'time' : 'TIME' , 'year' : 'INTEGER' , 'yearmonth'...
def ReadChildFlowObjects(self, client_id, flow_id):
    """Reads flows that were started by a given flow from the database."""
    return [
        flow
        for flow in itervalues(self.flows)
        if flow.client_id == client_id and flow.parent_flow_id == flow_id
    ]
def calc_zscale ( self , data , contrast = 0.25 , num_points = 1000 , num_per_row = None ) : """From the IRAF documentation : The zscale algorithm is designed to display the image values near the median image value without the time consuming process of computing a full image histogram . This is particularly u...
assert len ( data . shape ) >= 2 , AutoCutsError ( "input data should be 2D or greater" ) ht , wd = data . shape [ : 2 ] assert ( 0.0 < contrast <= 1.0 ) , AutoCutsError ( "contrast (%.2f) not in range 0 < c <= 1" % ( contrast ) ) # calculate num _ points parameter , if omitted total_points = np . size ( data ) if num_...
def getSlicesString ( self ) : """Returns a string representation of the slices that are used to get the sliced array . For example returns ' [ : , 5 ] ' if the combo box selects dimension 0 and the spin box 5."""
if not self . rtiIsSliceable : return '' # The dimensions that are selected in the combo boxes will be set to slice ( None ) , # the values from the spin boxes will be set as a single integer value nDims = self . rti . nDims sliceList = [ ':' ] * nDims for spinBox in self . _spinBoxes : dimNr = spinBox . proper...
def evaluate ( self , data ) : """Evaluate the code needed to compute a given Data object ."""
try : inputs = copy . deepcopy ( data . input ) hydrate_input_references ( inputs , data . process . input_schema ) hydrate_input_uploads ( inputs , data . process . input_schema ) # Include special ' proc ' variable in the context . inputs [ 'proc' ] = { 'data_id' : data . id , 'data_dir' : self . ...
def markers(data, marker, f_tooltip=None, marker_preferred_size=32):
    """Draw markers.

    :param data: data access object
    :param marker: full filename of the marker image
    :param f_tooltip: function to generate a tooltip on mouseover
    :param marker_preferred_size: size in pixels for the marker images
    """
    from geoplotlib.layers import MarkersLayer
    layer = MarkersLayer(data, marker, f_tooltip, marker_preferred_size)
    _global_config.layers.append(layer)
def add_options ( self ) : """Add program options ."""
super ( RtorrentControl , self ) . add_options ( ) # basic options self . add_bool_option ( "--help-fields" , help = "show available fields and their description" ) self . add_bool_option ( "-n" , "--dry-run" , help = "don't commit changes, just tell what would happen" ) self . add_bool_option ( "--detach" , help = "ru...
def validate_ids(ctx, param, value):
    """Validate a comma-separated list of IDs and convert them to a list."""
    if not value:
        return None
    ids = [chunk.strip() for chunk in value.split(',')]
    for candidate in ids:
        if candidate.isdigit():
            continue
        raise click.BadParameter(
            'Non-numeric value "{0}" provided for an ID.'.format(candidate))
    return ids
def visitValueSetValue ( self , ctx : ShExDocParser . ValueSetValueContext ) : """valueSetValue : iriRange | literalRange | languageRange | ' . ' ( iriExclusion + | literalExclusion + | languageExclusion + )"""
if ctx . iriRange ( ) or ctx . literalRange ( ) or ctx . languageRange ( ) : self . visitChildren ( ctx ) else : # ' . ' branch - wild card with exclusions if ctx . iriExclusion ( ) : vs_value = IriStemRange ( Wildcard ( ) , [ ] ) self . _iri_exclusions ( vs_value , ctx . iriExclusion ( ) ) ...
def register(self, token, regexp):
    """Register a token.

    Args:
        token (Token): the token class to register
        regexp (str): the regexp for that token
    """
    compiled = re.compile(regexp)
    self._tokens.append((token, compiled))
def tryDynMod(name):
    '''Dynamically import a python module or raise an exception.'''
    try:
        module = importlib.import_module(name)
    except ModuleNotFoundError:
        raise s_exc.NoSuchDyn(name=name)
    return module
def add_periodic_callback(self, callback, period_milliseconds, callback_id=None):
    """Adds a callback to be run every ``period_milliseconds`` until removed.

    Returns an ID that can be used with ``remove_periodic_callback``.
    """
    periodic = _AsyncPeriodic(callback, period_milliseconds, io_loop=self._loop)
    callback_id = self._assign_remover(
        callback, callback_id, self._periodic_callback_removers, periodic.stop)
    periodic.start()
    return callback_id
def my_on_connect(client):
    """Example on_connect handler."""
    client.send('You connected from %s\n' % client.addrport())
    if not CLIENTS:
        client.send('Sadly, you are alone.\n')
    else:
        client.send('Also connected are:\n')
        for neighbor in CLIENTS:
            client.send('%s\n' % neighbor.addrport())
    # Track the new client so later arrivals see it.
    CLIENTS.append(client)
def init(plugin_manager, _, _2, config):
    """Init the plugin.

    Available configuration::

        plugins:
            - plugin_module: inginious.frontend.plugins.git_repo
              repo_directory: "./repo_submissions"
    """
    saver = SubmissionGitSaver(plugin_manager, config)
    saver.daemon = True
    saver.start()
def compile ( * files , exe_name = None , cc = CC , ** cflags ) : """Compile C source files . : param files : filenames to be compiled : param exe _ name : name of resulting executable : param cc : compiler to use ( : data : ` check50 . c . CC ` by default ) : param cflags : additional flags to pass to the ...
if not files : raise RuntimeError ( _ ( "compile requires at least one file" ) ) if exe_name is None and files [ 0 ] . endswith ( ".c" ) : exe_name = Path ( files [ 0 ] ) . stem files = " " . join ( files ) flags = CFLAGS . copy ( ) flags . update ( cflags ) flags = " " . join ( ( f"-{flag}" + ( f"={value}" if ...
def _wait_on_metadata ( self , topic , max_wait ) : """Wait for cluster metadata including partitions for the given topic to be available . Arguments : topic ( str ) : topic we want metadata for max _ wait ( float ) : maximum time in secs for waiting on the metadata Returns : set : partition ids for the...
# add topic to metadata topic list if it is not there already . self . _sender . add_topic ( topic ) begin = time . time ( ) elapsed = 0.0 metadata_event = None while True : partitions = self . _metadata . partitions_for_topic ( topic ) if partitions is not None : return partitions if not metadata_e...
def clone(self, **data):
    '''Utility method for cloning the instance as a new object.

    :parameter data: additional fields which override field data.
    :rtype: a new instance of this class.
    '''
    meta = self._meta
    # The primary key is never copied; a caller may supply a new one.
    pk_value = data.pop(meta.pkname(), None)
    fields = self.todict(exclude_cache=True)
    fields.update(data)
    fields.pop('__dbdata__', None)
    new_obj = meta.make_object((pk_value, None, fields))
    new_obj.session = self.session
    return new_obj
def ext_pillar(minion_id,  # pylint: disable=W0613
               pillar,  # pylint: disable=W0613
               command):
    '''Execute a command and read the output as YAMLEX.'''
    try:
        command = command.replace('%s', minion_id)
        output = __salt__['cmd.run']('{0}'.format(command))
        return deserialize(output)
    except Exception:
        # Best effort: log and return empty pillar data rather than crash.
        log.critical('YAML data from %s failed to parse', command)
        return {}
def to_representation(self, value):
    """Transform the *outgoing* native value into primitive data."""
    message = (
        '{cls}.to_representation() must be implemented for field '
        '{field_name}. If you do not need to support write operations '
        'you probably want to subclass `ReadOnlyField` instead.'
    ).format(cls=self.__class__.__name__, field_name=self.field_name)
    raise NotImplementedError(message)
def assertrepr_compare ( config , op , left , right ) : """Return specialised explanations for some operators / operands"""
width = 80 - 15 - len ( op ) - 2 # 15 chars indentation , 1 space around op left_repr = py . io . saferepr ( left , maxsize = int ( width // 2 ) ) right_repr = py . io . saferepr ( right , maxsize = width - len ( left_repr ) ) summary = u ( '%s %s %s' ) % ( ecu ( left_repr ) , op , ecu ( right_repr ) ) issequence = lam...
def _generate_filename ( cls , writer_spec , name , job_id , num , attempt = None , seg_index = None ) : """Generates a filename for a particular output . Args : writer _ spec : specification dictionary for the output writer . name : name of the job . job _ id : the ID number assigned to the job . num : s...
naming_format = cls . _TMP_FILE_NAMING_FORMAT if seg_index is None : naming_format = writer_spec . get ( cls . NAMING_FORMAT_PARAM , cls . _DEFAULT_NAMING_FORMAT ) template = string . Template ( naming_format ) try : # Check that template doesn ' t use undefined mappings and is formatted well if seg_index is No...
def WriteEventBody(self, event):
    """Writes the body of an event object to the output.

    Args:
        event (EventObject): event.

    Raises:
        NoFormatterFound: if no event formatter can be found to match the
            data type in the event object.
    """
    values = self._GetOutputValues(event)
    # Column 3 carries the MACB representation; column 6 the timestamp
    # description, with '-' as the fallback.
    values[3] = self._output_mediator.GetMACBRepresentation(event)
    values[6] = event.timestamp_desc or '-'
    self._WriteOutputValues(values)
def maybe_load_model ( savedir , container ) : """Load model if present at the specified path ."""
if savedir is None : return state_path = os . path . join ( os . path . join ( savedir , 'training_state.pkl.zip' ) ) if container is not None : logger . log ( "Attempting to download model from Azure" ) found_model = container . get ( savedir , 'training_state.pkl.zip' ) else : found_model = os . path ...
def create_zip ( self , clean = True , increment_version = True , register = True ) : """Creates a GenePattern module zip file for upload and installation on a GenePattern server : param clean : boolean : return :"""
# First validate the attributes self . validate ( ) # Check to see if an existing interferes with module creation if os . path . exists ( MANIFEST_FILE_NAME ) : raise OSError ( "existing manifest blocks manifest file creation" ) # Write the manifest self . write_manifest ( ) # Create the zip self . _zip_files ( ) #...
def configure_logger ( logger , filename , folder , log_level ) : '''Configure logging behvior for the simulations .'''
fmt = logging . Formatter ( '%(asctime)s %(levelname)s: %(message)s' ) if folder is not None : log_file = os . path . join ( folder , filename ) hdl = logging . FileHandler ( log_file ) hdl . setFormatter ( fmt ) hdl . setLevel ( log_level ) logger . addHandler ( hdl ) shdl = logging . StreamHandler...
def visitAnnotation ( self , ctx : ShExDocParser . AnnotationContext ) : """annotation : ' / / ' predicate ( iri | literal )"""
# Annotations apply to the expression , NOT the shape ( ! ) annot = Annotation ( self . context . predicate_to_IRI ( ctx . predicate ( ) ) ) if ctx . iri ( ) : annot . object = self . context . iri_to_iriref ( ctx . iri ( ) ) else : annot . object = self . context . literal_to_ObjectLiteral ( ctx . literal ( ) ...
def get_user_deliveryserver(self, domainid, serverid):
    """Get a user delivery server."""
    params = dict(domainid=domainid, serverid=serverid)
    return self.api_call(ENDPOINTS['userdeliveryservers']['get'], params)
def R(self, value):
    """Set the measurement uncertainty and cache its Cholesky factor."""
    self._R = value
    # Lower-triangular Cholesky factor, kept alongside R for reuse.
    self._R1_2 = cholesky(self._R, lower=True)
def solve ( self ) : """Runs a power flow @ rtype : dict @ return : Solution dictionary with the following keys : - C { V } - final complex voltages - C { converged } - boolean value indicating if the solver converged or not - C { iterations } - the number of iterations performed"""
# Zero result attributes . self . case . reset ( ) # Retrieve the contents of the case . b , l , g , _ , _ , _ , _ = self . _unpack_case ( self . case ) # Update bus indexes . self . case . index_buses ( b ) # Index buses accoding to type . # try : # _ , pq , pv , pvpq = self . _ index _ buses ( b ) # except SlackBusEr...
def _http_put ( self , url , data , ** kwargs ) : """Performs the HTTP PUT request ."""
kwargs . update ( { 'data' : json . dumps ( data ) } ) return self . _http_request ( 'put' , url , kwargs )
def parse(fileobject, schema=None):
    """Parse a KML file object, optionally validating it against a schema."""
    if not schema:
        # Without validation.
        return objectify.parse(fileobject)
    # With validation.
    parser = objectify.makeparser(schema=schema.schema, strip_cdata=False)
    return objectify.parse(fileobject, parser=parser)
def get_path ( self , dir = None ) : """Return path relative to the current working directory of the Node . FS . Base object that owns us ."""
if not dir : dir = self . fs . getcwd ( ) if self == dir : return '.' path_elems = self . get_path_elements ( ) pathname = '' try : i = path_elems . index ( dir ) except ValueError : for p in path_elems [ : - 1 ] : pathname += p . dirname else : for p in path_elems [ i + 1 : - 1 ] : ...
def cleanup_custom_options ( id , weakref = None ) : """Cleans up unused custom trees if all objects referencing the custom id have been garbage collected or tree is otherwise unreferenced ."""
try : if Store . _options_context : return weakrefs = Store . _weakrefs . get ( id , [ ] ) if weakref in weakrefs : weakrefs . remove ( weakref ) refs = [ ] for wr in list ( weakrefs ) : r = wr ( ) if r is None or r . id != id : weakrefs . remove ( wr ) ...
def _string_parser ( strip_whitespace ) : """Return a parser function for parsing string values ."""
def _parse_string_value ( element_text , _state ) : if element_text is None : value = '' elif strip_whitespace : value = element_text . strip ( ) else : value = element_text return value return _parse_string_value
def generate_is_role_functions(cls, roles):
    """Generate ``class.is_{role}()`` methods for a class.

    Every generated method simply returns False; callers are expected to
    override the ones that apply.

    :param class cls: The python class to be modified.
    :param dict roles: The roles to use for generation.
    """
    for role_name in roles:
        setattr(cls, "is_" + role_name, lambda self: False)
def peek_16(library, session, address):
    """Read a 16-bit value from the specified address.

    Corresponds to the viPeek16 function of the VISA library.

    :param library: the visa library wrapped by ctypes.
    :param session: Unique logical identifier to a session.
    :param address: Source address to read from.
    :return: tuple of (value read, return code of the library call).
    """
    out = ViUInt16()
    status = library.viPeek16(session, address, byref(out))
    return out.value, status
def delete_translations(self, language=None):
    """Delete related translations (optionally restricted to one language)."""
    # Imported lazily to avoid a circular import at module load time.
    from .models import Translation
    manager = Translation.objects
    return manager.delete_translations(obj=self, language=language)
def update_user ( self , user_is_artist = "" , artist_level = "" , artist_specialty = "" , real_name = "" , tagline = "" , countryid = "" , website = "" , bio = "" ) : """Update the users profile information : param user _ is _ artist : Is the user an artist ? : param artist _ level : If the user is an artist ,...
if self . standard_grant_type is not "authorization_code" : raise DeviantartError ( "Authentication through Authorization Code (Grant Type) is required in order to connect to this endpoint." ) post_data = { } if user_is_artist : post_data [ "user_is_artist" ] = user_is_artist if artist_level : post_data [ "...
def kill_process ( procname , scriptname ) : """kill WSGI processes that may be running in development"""
# from http : / / stackoverflow . com / a / 2940878 import signal import subprocess p = subprocess . Popen ( [ 'ps' , 'aux' ] , stdout = subprocess . PIPE ) out , err = p . communicate ( ) for line in out . decode ( ) . splitlines ( ) : if procname in line and scriptname in line : pid = int ( line . split (...
def results(self):
    """All metrics.

    Returns:
        dict: results in a dictionary format
    """
    return {
        'overall': self.results_overall_metrics(),
        'class_wise': self.results_class_wise_metrics(),
        'class_wise_average': self.results_class_wise_average_metrics(),
    }
def _perform_replacements(self, chars):
    '''Perform simple key/value string replacements that require no logic.

    This is used to convert the fullwidth romaji, several ligatures, and
    the punctuation characters.
    '''
    # Replace in place: callers may hold a reference to the same list.
    for index, char in enumerate(chars):
        if char in repl:
            chars[index] = repl[char]
    # Some replacements might insert multi-character strings into the
    # list; flatten back to a list of single characters for iteration.
    return list(''.join(chars))
def bool_str(string):
    """Return the boolean encoded by ``string`` ('true' or 'false').

    :raises ValueError: if ``string`` is not a recognised boolean string.
    """
    if string not in BOOL_STRS:
        raise ValueError('Invalid boolean string: "{}"'.format(string))
    # Membership in BOOL_STRS constrains the input, so a direct
    # comparison replaces the redundant `True if ... else False`.
    return string == 'true'
def translate_stage_name(stage):
    """Account for potential variability in stage/phase name definition.

    Since a pipeline author is free to name processing stages as desired,
    but these choices influence file names, enforce some standardization:
    lowercase the name and replace spaces.
    """
    # parse_stage_name ensures we have text; str() also covers indexed
    # (integer) stages.
    name = parse_stage_name(stage)
    return str(name).lower().replace(" ", STAGE_NAME_SPACE_REPLACEMENT)
def reset_to_coefficients(self):
    """Keep only the coefficients.

    This can be used to recalculate the IO tables for a new final demand.

    Note:
        The system can not be reconstructed after this step because all
        absolute data is removed.  Save the Y data in case a
        reconstruction might be necessary.
    """
    # Development note: the coefficient attributes are defined in
    # self.__coefficients__.  Use a plain for-loop rather than a list
    # comprehension: setattr is called purely for its side effect.
    for key in self.get_DataFrame(data=False,
                                  with_unit=False,
                                  with_population=False):
        if key not in self.__coefficients__:
            setattr(self, key, None)
    return self
def cache_key(self, repo: str, branch: str, task: Task, git_repo: Repo) -> str:
    """Returns the key used for storing results in cache."""
    repo_part = self.repo_id(repo)
    hash_part = self.current_git_hash(repo, branch, git_repo)
    return "{repo}_{branch}_{hash}_{task}".format(
        repo=repo_part, branch=branch, hash=hash_part, task=task.hash)
def _get_property ( device_path : Union [ Path , str ] , property_name : str ) -> str : """Gets the given property for a device ."""
with open ( str ( Path ( device_path , property_name ) ) ) as file : return file . readline ( ) . strip ( )
def get_device_name(file_name, sys_obj_id, delimiter=":"):
    """Get device name by its SNMP sysObjectID property from the file map.

    :param str file_name: path of the delimiter-separated map file
    :param str sys_obj_id: SNMP sysObjectID to look up
    :param str delimiter: field delimiter used in the file
    :rtype: str
    :return: mapped device name, or ``sys_obj_id`` itself when the file
        is missing or contains no match
    """
    try:
        # Open in text mode with newline='': csv.reader requires str rows
        # on Python 3 (the previous "rb" mode broke the csv parser).
        with open(file_name, "r", newline="") as csv_file:
            csv_reader = csv.reader(csv_file, delimiter=delimiter)
            for row in csv_reader:
                if len(row) >= 2 and row[0] == sys_obj_id:
                    return row[1]
    except IOError:
        pass  # file does not exist
    return sys_obj_id
def wait_socket(_socket, session, timeout=1):
    """Helper function for testing non-blocking mode.

    This function blocks the calling thread for up to ``timeout`` seconds -
    to be used only for testing purposes.  Also available at
    ``ssh2.utils.wait_socket``.
    """
    directions = session.block_directions()
    if directions == 0:
        return 0
    want_read = directions & LIBSSH2_SESSION_BLOCK_INBOUND
    want_write = directions & LIBSSH2_SESSION_BLOCK_OUTBOUND
    readfds = [_socket] if want_read else ()
    writefds = [_socket] if want_write else ()
    return select(readfds, writefds, (), timeout)
def default_cell_formatter(table, column, row, value, **_):
    """:type column: tri.table.Column"""
    formatter = _cell_formatters.get(type(value))
    if formatter:
        value = formatter(table=table, column=column, row=row, value=value)
    if value is None:
        return ''
    return conditional_escape(value)
def create_record(self, rtype=None, name=None, content=None, **kwargs):
    """Create record.  If record already exists with the same content,
    do nothing."""
    deprecated_type = kwargs.get('type')
    if not rtype and deprecated_type:
        # Legacy keyword support: `type` maps onto `rtype`.
        warnings.warn('Parameter "type" is deprecated, use "rtype" instead.',
                      DeprecationWarning)
        rtype = deprecated_type
    return self._create_record(rtype, name, content)
def load_private_key ( key_file , key_password = None ) : """Load a private key from disk . : param key _ file : File path to key file . : param key _ password : Optional . If the key file is encrypted , provide the password to decrypt it . Defaults to None . : return : PrivateKey < string >"""
key_file = os . path . expanduser ( key_file ) key_file = os . path . abspath ( key_file ) if not key_password : with open ( key_file , 'r' ) as key : return key . read ( ) with open ( key_file , 'rb' ) as key : key_bytes = key . read ( ) return decrypt_key ( key_bytes , key_password ) . decode ( ENCODI...
def _call_widget_constructed(widget):
    """Static method, called when a widget is constructed."""
    callback = Widget._widget_construction_callback
    if callback is not None and callable(callback):
        callback(widget)
def get_key(cls, key):
    "Get the value of `key` in the config file, falling back to the defaults."
    fallback = cls.DEFAULT_CONFIG.get(key, None)
    return cls.get().get(key, fallback)
def register(self, key_or_tag, obj):
    """Register a custom Transit tag and new parsing function with the
    decoder.

    Passing "default_decoder" as ``key_or_tag`` sets the 'default_decoder'
    option instead of adding a tag decoder.
    """
    if key_or_tag != "default_decoder":
        self.decoders[key_or_tag] = obj
    else:
        self.options["default_decoder"] = obj
def probe ( self , key_id = None , ssh_user = None ) : """If no parameter is provided , mist . io will try to probe the machine with the default : param key _ id : Optional . Give if you explicitly want to probe with this key _ id : param ssh _ user : Optional . Give if you explicitly want a specific user ...
ips = [ ip for ip in self . info [ 'public_ips' ] if ':' not in ip ] if not ips : raise Exception ( "No public IPv4 address available to connect to" ) payload = { 'host' : ips [ 0 ] , 'key' : key_id , 'ssh_user' : ssh_user } data = json . dumps ( payload ) req = self . request ( self . mist_client . uri + "/clouds/...
def from_dir(cwd):
    "Context manager to ensure execution inside the cwd directory."
    import os
    previous = os.getcwd()
    try:
        os.chdir(cwd)
        yield
    finally:
        # Always restore the original working directory.
        os.chdir(previous)
def _get_audit_defaults ( option = None ) : '''Loads audit . csv defaults into a dict in _ _ context _ _ called ' lgpo . audit _ defaults ' . The dictionary includes fieldnames and all configurable policies as keys . The values are used to create / modify the ` ` audit . csv ` ` file . The first entry is ` fi...
if 'lgpo.audit_defaults' not in __context__ : # Get available setting names and GUIDs # This is used to get the fieldnames and GUIDs for individual policies log . debug ( 'Loading auditpol defaults into __context__' ) dump = __utils__ [ 'auditpol.get_auditpol_dump' ] ( ) reader = csv . DictReader ( dump ) ...
def update ( self , skill = None , author = None ) : """Update all downloaded skills or one specified skill ."""
if skill is None : return self . update_all ( ) else : if isinstance ( skill , str ) : skill = self . find_skill ( skill , author ) entry = get_skill_entry ( skill . name , self . skills_data ) if entry : entry [ 'beta' ] = skill . is_beta if skill . update ( ) : # On successful upda...
def provider(func=None, *, singleton=False, injector=None):
    """Decorator to mark a function as a provider.

    Args:
        singleton (bool): If True the provided value is shared; if False
            (the default) the provider function is invoked again every
            time it is needed.
        injector: Optional injector to register the provider with
            immediately.
    """
    def decorator(inner):
        wrapped = _wrap_provider_func(inner, {'singleton': singleton})
        if injector:
            injector.register_provider(wrapped)
        return wrapped
    # Support both bare @provider and parameterized @provider(...) usage.
    return decorator(func) if func else decorator
def check_infile_and_wp ( curinf , curwp ) : """Check the existence of the given file and directory path . 1 . Raise Runtime exception of both not existed . 2 . If the ` ` curwp ` ` is None , the set the base folder of ` ` curinf ` ` to it ."""
if not os . path . exists ( curinf ) : if curwp is None : TauDEM . error ( 'You must specify one of the workspace and the ' 'full path of input file!' ) curinf = curwp + os . sep + curinf curinf = os . path . abspath ( curinf ) if not os . path . exists ( curinf ) : TauDEM . error ( 'Inp...
def install ( path , name = None ) : """Compiles a Thrift file and installs it as a submodule of the caller . Given a tree organized like so : : foo / _ _ init _ _ . py bar . py my _ service . thrift You would do , . . code - block : : python my _ service = thriftrw . install ( ' my _ service . thri...
if name is None : name = os . path . splitext ( os . path . basename ( path ) ) [ 0 ] callermod = inspect . getmodule ( inspect . stack ( ) [ 1 ] [ 0 ] ) name = '%s.%s' % ( callermod . __name__ , name ) if name in sys . modules : return sys . modules [ name ] if not os . path . isabs ( path ) : callerfile =...
def stripped_name ( self ) : """Remove extraneous information from C + + demangled function names ."""
name = self . name # Strip function parameters from name by recursively removing paired parenthesis while True : name , n = self . _parenthesis_re . subn ( '' , name ) if not n : break # Strip const qualifier name = self . _const_re . sub ( '' , name ) # Strip template parameters from name by recursivel...
def cancel_broadcast(self, broadcast_guid):
    '''Cancel a broadcast specified by guid.'''
    payload = {'status': 'CANCELED'}
    return self._call(
        'broadcasts/%s/update' % broadcast_guid,
        method='POST',
        data=payload,
        content_type='application/json',
    )
def _track_tasks ( task_ids , cluster ) : """Poll task status until STOPPED"""
while True : statuses = _get_task_statuses ( task_ids , cluster ) if all ( [ status == 'STOPPED' for status in statuses ] ) : logger . info ( 'ECS tasks {0} STOPPED' . format ( ',' . join ( task_ids ) ) ) break time . sleep ( POLL_TIME ) logger . debug ( 'ECS task status for tasks {0}: {...
def delete_mappings_in_network ( network_id , network_2_id = None , ** kwargs ) : """Delete all the resource attribute mappings in a network . If another network is specified , only delete the mappings between the two networks ."""
qry = db . DBSession . query ( ResourceAttrMap ) . filter ( or_ ( ResourceAttrMap . network_a_id == network_id , ResourceAttrMap . network_b_id == network_id ) ) if network_2_id is not None : qry = qry . filter ( or_ ( ResourceAttrMap . network_a_id == network_2_id , ResourceAttrMap . network_b_id == network_2_id )...
def max_subsequence_sum ( arr : list , length : int , i_index : int , k_index : int ) -> int : """Calculate the maximum sum of an increasing subsequence from the start of the list up to the index ' i _ index ' . Subsequence must include the element at position ' k _ index ' ( where k _ index > i _ index ) . Arg...
dp = [ [ 0 for _ in range ( length ) ] for _ in range ( length ) ] for i in range ( length ) : dp [ 0 ] [ i ] = arr [ i ] + arr [ 0 ] if arr [ i ] > arr [ 0 ] else arr [ i ] for i in range ( 1 , length ) : for j in range ( length ) : if arr [ j ] > arr [ i ] and j > i : dp [ i ] [ j ] = max ...
async def delete(self, *args, **kwargs):
    '''Corresponds to a DELETE request with a resource identifier, deleting a
    single document from the database.

    :raises NotFound: the delete was acknowledged but no document matched.
    :raises BadRequest: the database did not acknowledge the delete.
    '''
    # Coerce the URL path parameter to the primary-key type declared on the class.
    pk = self.pk_type(kwargs['pk'])
    result = await self._meta.object_class.delete_entries(
        db=self.db, query={self.pk: pk})
    if result.acknowledged:
        # Acknowledged but nothing removed -> the document did not exist.
        if result.deleted_count == 0:
            raise NotFound()
    else:
        # NOTE(review): the original formatting is ambiguous; this reading pairs
        # the else with the acknowledgement check, i.e. an unacknowledged write
        # is reported as a failure — confirm against the project history.
        raise BadRequest('Failed to delete object')
def histogram ( transform , dimensions , nbins ) : '''Computes the N - dimensional histogram of the transformed data . Parameters transform : pyemma . coordinates . transfrom . Transformer object transform that provides the input data dimensions : tuple of indices indices of the dimensions you want to exa...
maximum = np . ones ( len ( dimensions ) ) * ( - np . inf ) minimum = np . ones ( len ( dimensions ) ) * np . inf # compute min and max for _ , chunk in transform : maximum = np . max ( np . vstack ( ( maximum , np . max ( chunk [ : , dimensions ] , axis = 0 ) ) ) , axis = 0 ) minimum = np . min ( np . vstack (...
def noise_plot ( signal , noise , normalise = False , ** kwargs ) : """Plot signal and noise fourier transforms and the difference . : type signal : ` obspy . core . stream . Stream ` : param signal : Stream of " signal " window : type noise : ` obspy . core . stream . Stream ` : param noise : Stream of the...
import matplotlib . pyplot as plt # Work out how many traces we can plot n_traces = 0 for tr in signal : try : noise . select ( id = tr . id ) [ 0 ] except IndexError : # pragma : no cover continue n_traces += 1 fig , axes = plt . subplots ( n_traces , 2 , sharex = True ) if len ( signal ) >...
def op ( self , i , o ) : """Tries to update the registers values with the given instruction ."""
for ii in range ( len ( o ) ) : if is_register ( o [ ii ] ) : o [ ii ] = o [ ii ] . lower ( ) if i == 'ld' : self . set ( o [ 0 ] , o [ 1 ] ) return if i == 'push' : if valnum ( self . regs [ 'sp' ] ) : self . set ( 'sp' , ( self . getv ( self . regs [ 'sp' ] ) - 2 ) % 0xFFFF ) else ...
def get_swagger_operation ( self , context = default_context ) : """get the swagger _ schema operation representation ."""
consumes = produces = context . contenttype_serializers . keys ( ) parameters = get_swagger_parameters ( self . parameters , context ) responses = { "400" : Response ( { "description" : "invalid input received" , "schema" : Schema ( { "title" : "FailureObject" , "type" : "object" , "properties" : { "success" : { "type"...
def query ( self , zipcode = None , prefix = None , pattern = None , city = None , state = None , lat = None , lng = None , radius = None , population_lower = None , population_upper = None , population_density_lower = None , population_density_upper = None , land_area_in_sqmi_lower = None , land_area_in_sqmi_upper = N...
filters = list ( ) # by coordinates _n_radius_param_not_null = sum ( [ isinstance ( lat , ( integer_types , float ) ) , isinstance ( lng , ( integer_types , float ) ) , isinstance ( radius , ( integer_types , float ) ) , ] ) if _n_radius_param_not_null == 3 : flag_radius_query = True if radius <= 0 : # pragma :...
def strfdelta ( tdelta , fmt ) : """Used to format ` datetime . timedelta ` objects . Works just like ` strftime ` > > > strfdelta ( duration , ' % H : % M : % S ' ) param tdelta : Time duration which is an instance of datetime . timedelta param fmt : The pattern to format the timedelta with rtype : str"""
substitutes = dict ( ) hours , rem = divmod ( tdelta . total_seconds ( ) , 3600 ) minutes , seconds = divmod ( rem , 60 ) substitutes [ "H" ] = '{:02d}' . format ( int ( hours ) ) substitutes [ "M" ] = '{:02d}' . format ( int ( minutes ) ) substitutes [ "S" ] = '{:02d}' . format ( int ( seconds ) ) return DeltaTemplate...
def patch_installed ( name , advisory_ids = None , downloadonly = None , ** kwargs ) : '''. . versionadded : : 2017.7.0 Ensure that packages related to certain advisory ids are installed . Currently supported for the following pkg providers : : mod : ` yumpkg < salt . modules . yumpkg > ` and : mod : ` zypper...
ret = { 'name' : name , 'changes' : { } , 'result' : None , 'comment' : '' } if 'pkg.list_patches' not in __salt__ : ret [ 'result' ] = False ret [ 'comment' ] = 'The pkg.patch_installed state is not available on ' 'this platform' return ret if not advisory_ids and isinstance ( advisory_ids , list ) : r...
def vote_count(self):
    """Return the total number of votes cast for this poll option.

    Sums the ``vote`` field of all Vote rows attached to this object via
    the generic content-type relation; returns 0 when there are none.
    """
    ctype = ContentType.objects.get_for_model(self)
    votes = Vote.objects.filter(content_type=ctype, object_id=self.id)
    total = votes.aggregate(Sum('vote'))['vote__sum']
    return total or 0
def supervise ( project , workspace , namespace , workflow , sample_sets , recovery_file ) : """Supervise submission of jobs from a Firehose - style workflow of workflows"""
# Get arguments logging . info ( "Initializing FireCloud Supervisor..." ) logging . info ( "Saving recovery checkpoints to " + recovery_file ) # Parse workflow description # these three objects must be saved in order to recover the supervisor args = { 'project' : project , 'workspace' : workspace , 'namespace' : namesp...
def get_goids_sections(sections):
    """Return the union of all GO IDs in a 2-D sections list.

    ``sections`` is an iterable of ``(section_name, goids)`` pairs; the
    section names are ignored.
    """
    return {goid for _, goids_sec in sections for goid in goids_sec}
def count(cls, name):
    """Return the counter value stored under ``name`` (0 when absent).

    Looks the document up in ``cls.collection`` and reads its ``seq``
    field, defaulting to 0 when the document or field is missing.
    """
    doc = cls.collection.find_one({'name': name})
    if not doc:
        return 0
    return doc.get('seq', 0)
def extract_version_from_filename(filename):
    """Extract the version number from an sdist filename.

    Handles double extensions such as ``.tar.gz`` and package names that
    themselves contain hyphens (e.g. ``python-dateutil-2.8.2.tar.gz``).

    :param filename: path or basename of an sdist archive.
    :returns: the version string, or ``''`` when no hyphen is present.
    """
    stem = os.path.splitext(os.path.basename(filename))[0]
    # One splitext pass on foo-1.0.tar.gz leaves a trailing .tar; strip it too.
    if stem.endswith('.tar'):
        stem = os.path.splitext(stem)[0]
    # sdists are named <name>-<version> and the name may contain hyphens,
    # so split at the LAST hyphen (partition('-') split at the first one,
    # returning e.g. 'dateutil-2.8.2' for python-dateutil-2.8.2).
    _name, sep, version = stem.rpartition('-')
    return version if sep else ''
def get(self, key, default='', stringify=True):
    """Return the value stored under ``key``, or ``default`` when it is None.

    Args:
        key: string. Dictionary key to look up.
        default: string. Returned (unmodified) when the lookup yields None.
        stringify: bool. Force non-None values to ``str`` for compatibility.

    Returns:
        The looked-up value, stringified unless ``stringify`` is False.
    """
    value = self.__getitem__(key)
    if value is None:
        return default
    return str(value) if stringify else value
def _get_resampled ( self , rule , how = { 'ohlc' : 'last' , 'volume' : 'sum' } , df = None , ** kwargs ) : """Returns a resampled DataFrame Parameters rule : str the offset string or object representing target conversion for all aliases available see http : / / pandas . pydata . org / pandas - docs / stabl...
df = self . df . copy ( ) if df is None else df if rule == None : return df else : if isinstance ( how , dict ) : if 'ohlc' in how : v = how . pop ( 'ohlc' ) for _ in [ 'open' , 'high' , 'low' , 'close' ] : how [ _ ] = v _how = how . copy ( ) for _...
def get_languages(self):
    """Get the list of languages we need to start servers and create
    clients for.

    Always includes ``'python'``; additionally includes every option in
    this plugin's config section whose name matches a supported LSP
    language (case-insensitive).
    """
    languages = ['python']
    # Build the lower-cased language set once; the original rebuilt the
    # [l.lower() for l in LSP_LANGUAGES] list on every loop iteration.
    lsp_languages = {lang.lower() for lang in LSP_LANGUAGES}
    for option in CONF.options(self.CONF_SECTION):
        if option in lsp_languages:
            languages.append(option)
    return languages
def tab(self, n=1, interval=0, pre_dl=None, post_dl=None):
    """Tap the ``tab`` key ``n`` times, with ``interval`` seconds between taps.

    :param n: number of taps.
    :param interval: seconds to wait between consecutive taps.
    :param pre_dl: optional delay (seconds) before tapping starts.
    :param post_dl: optional delay (seconds) after tapping finishes.
    """
    self.delay(pre_dl)   # optional pause before typing
    self.k.tap_key(self.k.tab_key, n, interval)
    self.delay(post_dl)  # optional pause after typing
def iflatten(L):
    """Iteratively flatten an arbitrarily nested iterable, yielding leaves.

    Strings and bytes are treated as atomic leaves: the original bare
    ``hasattr(sublist, '__iter__')`` test recursed into them character by
    character, which never terminates in Python 3 (every 1-char string is
    itself iterable) and raised RecursionError.
    """
    for item in L:
        if hasattr(item, '__iter__') and not isinstance(item, (str, bytes)):
            for leaf in iflatten(item):
                yield leaf
        else:
            yield item
def generate_tokens ( self ) : """Tokenize the file , run physical line checks and yield tokens ."""
if self . _io_error : self . report_error ( 1 , 0 , 'E902 %s' % self . _io_error , readlines ) tokengen = tokenize . generate_tokens ( self . readline ) try : for token in tokengen : if token [ 2 ] [ 0 ] > self . total_lines : return self . noqa = token [ 4 ] and noqa ( token [ 4 ] )...
def _bigger_than_zero ( value : str ) -> int : """Type evaluator for argparse ."""
ivalue = int ( value ) if ivalue < 0 : raise ArgumentTypeError ( '{} should be bigger than 0' . format ( ivalue ) ) return ivalue
def validate ( self ) : """if schema exists we run shape file validation code of fiona by trying to save to in MemoryFile"""
if self . _schema is not None : with MemoryFile ( ) as memfile : with memfile . open ( driver = "ESRI Shapefile" , schema = self . schema ) as target : for _item in self . _results : # getting rid of the assets that don ' t behave well becasue of in memroy rasters item = GeoFeatu...