signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def to_api ( in_dict , int_keys = None , date_keys = None , bool_keys = None ) : """Extends a given object for API Production ."""
# Cast all int _ keys to int ( ) if int_keys : for in_key in int_keys : if ( in_key in in_dict ) and ( in_dict . get ( in_key , None ) is not None ) : in_dict [ in_key ] = int ( in_dict [ in_key ] ) # Cast all date _ keys to datetime . isoformat if date_keys : for in_key in date_keys : ...
def _processor(self):
    """Application processor that prepares the session for every request.

    Purges sessions older than the configured timeout from the store,
    then loads the session for the current request.
    """
    session_timeout = self._config.timeout
    self.store.cleanup(session_timeout)
    self._load()
def string_to_locale(value, strict=True):
    """Convert a string representation of a locale into a ``Locale``.

    @param value: a string representation of a locale, i.e., an ISO
        639-3 alpha-3 code (or alpha-2 code), optionally followed by a
        dash character ``-`` and a country code.
    @param strict: when ``True``, re-raise any parsing error; when
        ``False``, malformed input yields ``None``.
    @return: a ``Locale`` instance, or ``None`` when ``value`` is
        undefined or (in non-strict mode) malformed.
    """
    try:
        return None if is_undefined(value) else Locale.from_string(value, strict=strict)
    except Locale.MalformedLocaleException:
        # Fixed: the original used the Python-2-only ``except E, e``
        # syntax, which is a SyntaxError on Python 3.  Bare ``raise``
        # preserves the original traceback.
        if strict:
            raise
        # Non-strict mode deliberately swallows malformed input; the
        # original returned None implicitly — made explicit here.
        return None
def execute ( self , conn , child_block_name = '' , child_lfn_list = [ ] , transaction = False ) : sql = '' binds = { } child_ds_name = '' child_where = '' if child_block_name : child_ds_name = child_block_name . split ( '#' ) [ 0 ] parent_where = " where d.dataset = :child_ds_name )...
def ploidy ( args ) : """% prog ploidy seqids layout Build a figure that calls graphics . karyotype to illustrate the high ploidy of B . napus genome ."""
p = OptionParser ( ploidy . __doc__ ) opts , args , iopts = p . set_image_options ( args , figsize = "8x7" ) if len ( args ) != 2 : sys . exit ( not p . print_help ( ) ) seqidsfile , klayout = args fig = plt . figure ( 1 , ( iopts . w , iopts . h ) ) root = fig . add_axes ( [ 0 , 0 , 1 , 1 ] ) Karyotype ( fig , roo...
def wite_to_json ( self , dir_path = "" , file_name = "" ) : """将性能数据写入文件 ."""
# 提取数据 data = { "plot_data" : self . record_thread . profile_data , "method_exec_info" : self . method_exec_info , "search_file" : self . search_file , "source_file" : self . source_file } # 写入文件 file_path = os . path . join ( dir_path , file_name ) if not os . path . exists ( dir_path ) : os . makedirs ( dir_path ...
def UpsertUserDefinedFunction(self, collection_link, udf, options=None):
    """Upserts a user defined function in a collection.

    :param str collection_link: The link to the collection.
    :param str udf: The user defined function to upsert.
    :param dict options: The request options for the request.
    :return: The upserted UDF.
    """
    options = {} if options is None else options
    collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf)
    return self.Upsert(udf, path, 'udfs', collection_id, None, options)
def check_strict(self, name, original, loc, tokens):
    """Check that syntax meets --strict requirements."""
    internal_assert(len(tokens) == 1, "invalid " + name + " tokens", tokens)
    # In non-strict mode the construct is allowed; pass its token through.
    if not self.strict:
        return tokens[0]
    raise self.make_err(CoconutStyleError, "found " + name, original, loc)
def build_tree_from_json ( parent_node , sourcetree ) : """Recusively parse nodes in the list ` sourcetree ` and add them as children to the ` parent _ node ` . Usually called with ` parent _ node ` being a ` ChannelNode ` ."""
EXPECTED_NODE_TYPES = [ TOPIC_NODE , VIDEO_NODE , AUDIO_NODE , EXERCISE_NODE , DOCUMENT_NODE , HTML5_NODE ] for source_node in sourcetree : kind = source_node [ 'kind' ] if kind not in EXPECTED_NODE_TYPES : LOGGER . critical ( 'Unexpected node kind found: ' + kind ) raise NotImplementedError ( '...
def sub(table, field, pattern, repl, count=0, flags=0):
    """Convenience function to convert values under the given field
    using a regular expression substitution.

    See also :func:`re.sub`.
    """
    compiled = re.compile(pattern, flags)

    def _substitute(value):
        return compiled.sub(repl, value, count=count)

    return convert(table, field, _substitute)
def association_generator ( self , file , skipheader = False , outfile = None ) -> Dict : """Returns a generator that yields successive associations from file Yields association"""
file = self . _ensure_file ( file ) for line in file : parsed_result = self . parse_line ( line ) self . report . report_parsed_result ( parsed_result , outfile , self . config . filtered_evidence_file , self . config . filter_out_evidence ) for association in parsed_result . associations : # yield associat...
def POST(self):  # pylint: disable=arguments-differ
    """Display main course list page."""
    welcome = self.app.welcome_page
    # Without a configured welcome page, redirect to the course list.
    if not welcome:
        raise web.seeother("/courselist")
    return self.show_page(welcome)
def validate_flavor_data(self, expected, actual):
    """Validate flavor data.

    Validate a list of actual flavors vs a list of expected flavors.
    """
    self.log.debug('Validating flavor data...')
    self.log.debug('actual: {}'.format(repr(actual)))
    actual_names = [flavor.name for flavor in actual]
    return self._validate_list_data(expected, actual_names)
def manage_api_keys ( ) : """Page for viewing and creating API keys ."""
build = g . build create_form = forms . CreateApiKeyForm ( ) if create_form . validate_on_submit ( ) : api_key = models . ApiKey ( ) create_form . populate_obj ( api_key ) api_key . id = utils . human_uuid ( ) api_key . secret = utils . password_uuid ( ) save_admin_log ( build , created_api_key = Tr...
def put_nowait ( self , item ) : """Put an item into the queue without blocking . If no free slot is immediately available , raise QueueFull ."""
self . _parent . _check_closing ( ) with self . _parent . _sync_mutex : if self . _parent . _maxsize > 0 : if self . _parent . _qsize ( ) >= self . _parent . _maxsize : raise AsyncQueueFull self . _parent . _put_internal ( item ) self . _parent . _notify_async_not_empty ( threadsafe = Fa...
def _is_physical_entity(pe):
    """Return True if the element is a physical entity."""
    # isinstance accepts a tuple of types — equivalent to the two
    # separate checks joined with ``or``.
    return isinstance(pe, (_bp('PhysicalEntity'), _bpimpl('PhysicalEntity')))
def average_percentage_of_new_providers ( providers_info , providers_count ) : """Return the average percentage of new providers per segment and the average percentage of all projects ."""
segments_percentages = { } all_projects_percentages = [ ] providers_count = providers_count . to_dict ( ) [ 0 ] for _ , items in providers_info . groupby ( 'PRONAC' ) : cnpj_array = items [ 'nrCNPJCPF' ] . unique ( ) new_providers = 0 for cnpj in cnpj_array : cnpj_count = providers_count . get ( cnp...
def as_phononwebsite ( self ) : """Return a dictionary with the phononwebsite format : http : / / henriquemiranda . github . io / phononwebsite"""
d = { } # define the lattice d [ "lattice" ] = self . structure . lattice . _matrix . tolist ( ) # define atoms atom_pos_car = [ ] atom_pos_red = [ ] atom_types = [ ] for site in self . structure . sites : atom_pos_car . append ( site . coords . tolist ( ) ) atom_pos_red . append ( site . frac_coords . tolist (...
def get(self, key: Text, locale: Optional[Text]) -> List[Tuple[Text, ...]]:
    """Get a single set of intents."""
    chosen = self.choose_locale(locale)
    return self.dict[chosen][key]
def _url_for_email(endpoint, base_url=None, **kw):
    """Create an external url_for by using a custom base_url different
    from the domain we are on.

    :param endpoint:
    :param base_url:
    :param kw:
    :return:
    """
    base_url = base_url or config("MAIL_EXTERNAL_BASE_URL")
    # With a custom base_url we generate a relative URL and join it
    # ourselves; otherwise let url_for build an absolute one.
    _external = not base_url
    url = url_for(endpoint, _external=_external, **kw)
    if base_url and not _external:
        url = "%s/%s" % (base_url.strip("/"), url.lstrip("/"))
    return url
def copy ( self , with_time = True , ignore_standard_types = False ) : """Returns a deep copy of the Network object with all components and time - dependent data . Returns network : pypsa . Network Parameters with _ time : boolean , default True Copy snapshots and time - varying network . component _ na...
override_components , override_component_attrs = self . _retrieve_overridden_components ( ) network = self . __class__ ( ignore_standard_types = ignore_standard_types , override_components = override_components , override_component_attrs = override_component_attrs ) for component in self . iterate_components ( [ "Bus" ...
def subscribe_user_to_discussion(recID, uid):
    """Subscribe a user to a discussion, so that she receives by email
    all new comments for this record.

    :param recID: record ID corresponding to the discussion we want to
        subscribe the user
    :param uid: user id
    :return: 1 on success, 0 on failure
    """
    query = """INSERT INTO "cmtSUBSCRIPTION" (id_bibrec, id_user, creation_time) VALUES (%s, %s, %s)"""
    params = (recID, uid, convert_datestruct_to_datetext(time.localtime()))
    try:
        run_sql(query, params)
    except Exception:
        # Fixed: the original bare ``except:`` also swallowed
        # SystemExit/KeyboardInterrupt.  The best-effort contract
        # (return 0 on any DB failure) is preserved.
        return 0
    return 1
def start_processor ( self ) : """* * Purpose * * : Method to start the wfp process . The wfp function is not to be accessed directly . The function is started in a separate process using this method ."""
if not self . _wfp_process : try : self . _prof . prof ( 'creating wfp process' , uid = self . _uid ) self . _wfp_process = Process ( target = self . _wfp , name = 'wfprocessor' ) self . _enqueue_thread = None self . _dequeue_thread = None self . _enqueue_thread_terminate = t...
def SaveName_Conv ( Mod = None , Cls = None , Type = None , Name = None , Deg = None , Exp = None , Diag = None , shot = None , version = None , usr = None , include = None ) : """Return a default name for saving the object Includes key info for fast identification of the object from file name Used on object cr...
Modstr = ID . _dModes [ Mod ] if Mod is not None else None include = ID . _defInclude if include is None else include if Cls is not None and Type is not None and 'Type' in include : Clsstr = Cls + Type else : Clsstr = Cls Dict = { 'Mod' : Modstr , 'Cls' : Clsstr , 'Name' : Name } for ii in include : if not ...
def relop_code(self, relop, operands_type):
    """Returns code for relational operator.

    relop - relational operator
    operands_type - int or unsigned
    """
    base_code = self.RELATIONAL_DICT[relop]
    # Unsigned comparisons use a second bank of operator codes, offset
    # by the number of relational operators.
    if operands_type == SharedData.TYPES.INT:
        return base_code
    return base_code + len(SharedData.RELATIONAL_OPERATORS)
async def stderr(self) -> AsyncGenerator[str, None]:
    """Asynchronous generator for lines from subprocess stderr."""
    await self.wait_running()
    stream = self._subprocess.stderr  # type: ignore
    async for line in stream:
        yield line
def nx_gen_node_values(G, key, nodes, default=util_const.NoParam):
    """Generates attribute values of specific nodes."""
    node_dict = nx_node_dict(G)
    # Without a default, a missing key raises KeyError (dict indexing);
    # with one, fall back via dict.get.
    if default is util_const.NoParam:
        fetch = lambda n: node_dict[n][key]
    else:
        fetch = lambda n: node_dict[n].get(key, default)
    return (fetch(n) for n in nodes)
def stopReceivingBoxes(self, reason):
    """Stop observing log events.

    Delegates connection teardown to the AMP base class, then detaches
    this instance's ``_emit`` observer from the log.
    """
    AMP.stopReceivingBoxes(self, reason)
    log.removeObserver(self._emit)
def searchusers(self, pattern):
    """Return a bugzilla User for the given list of patterns.

    :arg pattern: List of patterns to match against.
    :returns: List of User records
    """
    raw_users = self._getusers(match=pattern).get('users', [])
    return [User(self, **raw) for raw in raw_users]
def get_html_output ( self ) : """Return line generator ."""
def html_splitlines ( lines ) : # this cool function was taken from trac . # http : / / projects . edgewall . com / trac / open_tag_re = re . compile ( r'<(\w+)(\s.*)?[^/]?>' ) close_tag_re = re . compile ( r'</(\w+)>' ) open_tags = [ ] for line in lines : for tag in open_tags : line...
def create ( self , name , volume , description = None , force = False ) : """Adds exception handling to the default create ( ) call ."""
try : snap = super ( CloudBlockStorageSnapshotManager , self ) . create ( name = name , volume = volume , description = description , force = force ) except exc . BadRequest as e : msg = str ( e ) if "Invalid volume: must be available" in msg : # The volume for the snapshot was attached . raise exc ...
def _get_template_texts ( source_list = None , template = 'jinja' , defaults = None , context = None , ** kwargs ) : '''Iterate a list of sources and process them as templates . Returns a list of ' chunks ' containing the rendered templates .'''
ret = { 'name' : '_get_template_texts' , 'changes' : { } , 'result' : True , 'comment' : '' , 'data' : [ ] } if source_list is None : return _error ( ret , '_get_template_texts called with empty source_list' ) txtl = [ ] for ( source , source_hash ) in source_list : context_dict = defaults if defaults else { } ...
def get_success_url(self):
    """Returns the success URL to redirect the user to."""
    topic = self.object
    url_kwargs = {
        'forum_slug': topic.forum.slug,
        'forum_pk': topic.forum.pk,
        'slug': topic.slug,
        'pk': topic.pk,
    }
    return reverse('forum_conversation:topic', kwargs=url_kwargs)
def regex_replace(arg, pattern, replacement):
    """Replaces match found by regex with replacement string.

    Replacement string can also be a regex.

    Parameters
    ----------
    pattern : string (regular expression string)
    replacement : string (can be regular expression string)
    """
    op = ops.RegexReplace(arg, pattern, replacement)
    return op.to_expr()
def prompt_cfg ( self , msg , sec , name , ispass = False ) : """Prompt for a config value , optionally saving it to the user - level cfg . Only runs if we are in an interactive mode . @ param msg : Message to display to user . @ param sec : Section of config to add to . @ param name : Config item name . ...
shutit_global . shutit_global_object . yield_to_draw ( ) cfgstr = '[%s]/%s' % ( sec , name ) config_parser = self . config_parser usercfg = os . path . join ( self . host [ 'shutit_path' ] , 'config' ) self . log ( '\nPROMPTING FOR CONFIG: %s' % ( cfgstr , ) , transient = True , level = logging . INFO ) self . log ( '\...
def _pys2code(self, line):
    """Updates code in pys code_array.

    Splits one pys line into row, col, tab and the remaining code
    string (presumably tab- or whitespace-separated — confirm against
    ``_split_tidy``) and stores the decoded code in the grid.

    NOTE(review): uses the Python 2 ``unicode`` builtin, so this code
    is Python-2-only as written.
    """
    row, col, tab, code = self._split_tidy(line, maxsplit=3)
    key = self._get_key(row, col, tab)
    self.code_array.dict_grid[key] = unicode(code, encoding='utf-8')
def cartesian_to_spherical ( cartesian ) : """Convert cartesian to spherical coordinates passed as ( N , 3 ) shaped arrays ."""
xyz = cartesian xy = xyz [ : , 0 ] ** 2 + xyz [ : , 1 ] ** 2 r = np . sqrt ( xy + xyz [ : , 2 ] ** 2 ) phi = np . arctan2 ( np . sqrt ( xy ) , xyz [ : , 2 ] ) # for elevation angle defined from Z - axis down # ptsnew [ : , 4 ] = np . arctan2 ( xyz [ : , 2 ] , np . sqrt ( xy ) ) # for elevation angle defined from XY - p...
def ystep(self):
    r"""Minimise Augmented Lagrangian with respect to :math:`\mathbf{y}`.

    Applies the joint l1/l2 proximal operator to ``AX + U`` with the l1
    weight scaled by ``lmbda/rho`` and the l2 weight by ``mu/rho``,
    over the last axis, then delegates to the base-class step
    (presumably for shared bookkeeping — confirm in GenericBPDN).
    """
    self.Y = np.asarray(
        sp.prox_l1l2(self.AX + self.U,
                     (self.lmbda / self.rho) * self.wl1,
                     self.mu / self.rho, axis=-1),
        dtype=self.dtype)
    GenericBPDN.ystep(self)
def _next_page ( self ) : """Fetch the next page of the query ."""
if self . _last_page_seen : raise StopIteration new , self . _last_page_seen = self . conn . query_multiple ( self . object_type , self . _next_page_index , self . url_params , self . query_params ) self . _next_page_index += 1 if len ( new ) == 0 : self . _last_page_seen = True # don ' t bother with next p...
def draw_connected_scoped_label ( context , color , name_size , handle_pos , port_side , port_side_size , draw_connection_to_port = False ) : """Draw label of scoped variable This method draws the label of a scoped variable connected to a data port . This is represented by drawing a bigger label where the top p...
c = context . cairo c . set_line_width ( port_side_size * .03 ) c . set_source_rgb ( * color . to_floats ( ) ) rot_angle = .0 move_x = 0. move_y = 0. if port_side is SnappedSide . RIGHT : move_x = handle_pos . x + 2 * port_side_size move_y = handle_pos . y - name_size [ 1 ] / 2. c . move_to ( move_x + name_...
def from_gff3(path, attributes=None, region=None, score_fill=-1,
              phase_fill=-1, attributes_fill='.', dtype=None):
    """Read a feature table from a GFF3 format file.

    Parameters
    ----------
    path : string
        File path.
    attributes : list of strings, optional
        List of columns to extract from the "attributes" field.
    """
    recarray = gff3_to_recarray(path, attributes=attributes, region=region,
                                score_fill=score_fill, phase_fill=phase_fill,
                                attributes_fill=attributes_fill, dtype=dtype)
    return None if recarray is None else FeatureTable(recarray, copy=False)
def set_condition(self, value):
    """Setter for 'condition' field.

    :param value: a new value of 'condition' field. Required field.
        Must be a String.
    """
    # ``None`` fails isinstance(str) too, so a separate None check is
    # redundant — behavior is unchanged.
    if not isinstance(value, str):
        raise TypeError("Condition is required and must be set to a String")
    self.__condition = value
def set_mmap ( self , mmap ) : """Enable / Disable use of a mapped file to simulate router memory . By default , a mapped file is used . This is a bit slower , but requires less memory . : param mmap : activate / deactivate mmap ( boolean )"""
if mmap : flag = 1 else : flag = 0 yield from self . _hypervisor . send ( 'vm set_ram_mmap "{name}" {mmap}' . format ( name = self . _name , mmap = flag ) ) if mmap : log . info ( 'Router "{name}" [{id}]: mmap enabled' . format ( name = self . _name , id = self . _id ) ) else : log . info ( 'Router "{na...
def id_to_fqname ( self , uuid , type = None ) : """Return fq _ name and type for uuid If ` type ` is provided check that uuid is actually a resource of type ` type ` . Raise HttpError if it ' s not the case . : param uuid : resource uuid : type uuid : UUIDv4 str : param type : resource type : type ty...
data = { "uuid" : uuid } result = self . post_json ( self . make_url ( "/id-to-fqname" ) , data ) result [ 'fq_name' ] = FQName ( result [ 'fq_name' ] ) if type is not None and not result [ 'type' ] . replace ( '_' , '-' ) == type : raise HttpError ( 'uuid %s not found for type %s' % ( uuid , type ) , http_status =...
def save ( self , filename , dataset_number = None , force = False , overwrite = True , extension = "h5" , ensure_step_table = None ) : """Save the data structure to cellpy - format . Args : filename : ( str ) the name you want to give the file dataset _ number : ( int ) if you have several datasets , chose t...
if ensure_step_table is None : ensure_step_table = self . ensure_step_table dataset_number = self . _validate_dataset_number ( dataset_number ) if dataset_number is None : self . logger . info ( "Saving test failed!" ) self . _report_empty_dataset ( ) return test = self . get_dataset ( dataset_number ) ...
def terminate(self):
    """Terminate all the :attr:`initialized_providers`."""
    logger.debug('Terminating initialized providers')
    # Snapshot the names first: deletion mutates the mapping we would
    # otherwise be iterating.
    for provider_name in list(self.initialized_providers):
        del self[provider_name]
def update_model_snapshot ( self , job_id , snapshot_id , body , params = None ) : """` < http : / / www . elastic . co / guide / en / elasticsearch / reference / current / ml - update - snapshot . html > ` _ : arg job _ id : The ID of the job to fetch : arg snapshot _ id : The ID of the snapshot to update : ...
for param in ( job_id , snapshot_id , body ) : if param in SKIP_IN_PATH : raise ValueError ( "Empty value passed for a required argument." ) return self . transport . perform_request ( "POST" , _make_path ( "_ml" , "anomaly_detectors" , job_id , "model_snapshots" , snapshot_id , "_update" , ) , params = par...
def on_install ( self , editor ) : """Extends : meth : ` pyqode . core . api . Mode . on _ install ` method to set the editor instance as the parent widget . . . warning : : Don ' t forget to call * * super * * if you override this method ! : param editor : editor instance : type editor : pyqode . core . ...
Mode . on_install ( self , editor ) self . setParent ( editor ) self . setPalette ( QtWidgets . QApplication . instance ( ) . palette ( ) ) self . setFont ( QtWidgets . QApplication . instance ( ) . font ( ) ) self . editor . panels . refresh ( ) self . _background_brush = QtGui . QBrush ( QtGui . QColor ( self . palet...
def reward ( self ) : """Returns a tuple of sum of raw and processed rewards ."""
raw_rewards , processed_rewards = 0 , 0 for ts in self . time_steps : # NOTE : raw _ reward and processed _ reward are None for the first time - step . if ts . raw_reward is not None : raw_rewards += ts . raw_reward if ts . processed_reward is not None : processed_rewards += ts . processed_rewar...
def match ( self , item ) : """Return ` ` True ` ` if the expected matchers are matched in any order , otherwise ` ` False ` ` ."""
if not self . _unused_matchers : raise RuntimeError ( 'Matcher exhausted, no more matchers to use' ) for matcher in self . _unused_matchers : if matcher ( item ) : self . _used_matchers . append ( matcher ) break if not self . _unused_matchers : # All patterns have been matched return True r...
def loadSharedResource(self, pchResourceName, pchBuffer, unBufferLen):
    """Loads the specified resource into the provided buffer if large
    enough.

    Returns the size in bytes of the buffer required to hold the
    specified resource.
    """
    return self.function_table.loadSharedResource(
        pchResourceName, pchBuffer, unBufferLen)
def control ( self , key ) : """Send a control command ."""
if not self . connection : raise exceptions . ConnectionClosed ( ) payload = b"\x00\x00\x00" + self . _serialize_string ( key ) packet = b"\x00\x00\x00" + self . _serialize_string ( payload , True ) logging . info ( "Sending control command: %s" , key ) self . connection . send ( packet ) self . _read_response ( ) ...
def make_optimal_phenotype_grid ( environment , phenotypes ) : """Takes an EnvironmentFile object and a 2d array of phenotypes and returns a 2d array in which each location contains an index representing the distance between the phenotype in that location and the optimal phenotype for that location . This i...
world_size = environment . size phenotypes = deepcopy ( phenotypes ) for i in range ( world_size [ 1 ] ) : for j in range ( world_size [ 0 ] ) : for k in range ( len ( phenotypes [ i ] [ j ] ) ) : phenotype = phenotype_to_res_set ( phenotypes [ i ] [ j ] [ k ] , environment . tasks ) ...
def masked(a, b):
    """Return a numpy array with values from a where elements in b are
    not False.

    Populate with numpy.nan where b is False (or with None for
    non-numeric dtypes).  When plotting, those elements look like
    missing, which can be a desired result.
    """
    numeric_kinds = ('i', 'u', 'f', 'c')
    if np.any([a.dtype.kind.startswith(kind) for kind in numeric_kinds]):
        fill = np.array([np.nan] * len(a))
    else:
        fill = np.array([None] * len(a))
    return np.where(b, a, fill)
def subscribe_to_address_webhook ( callback_url , subscription_address , event = 'tx-confirmation' , confirmations = 0 , confidence = 0.00 , coin_symbol = 'btc' , api_key = None ) : '''Subscribe to transaction webhooks on a given address . Webhooks for transaction broadcast and each confirmation ( up to 6 ) . R...
assert is_valid_coin_symbol ( coin_symbol ) assert is_valid_address_for_coinsymbol ( subscription_address , coin_symbol ) assert api_key , 'api_key required' url = make_url ( coin_symbol , 'hooks' ) params = { 'token' : api_key } data = { 'event' : event , 'url' : callback_url , 'address' : subscription_address , } if ...
def load(self, dtype_conversion=None):
    """Load the data table and corresponding validation schema.

    Parameters
    ----------
    dtype_conversion : dict
        Column names as keys and corresponding type for loading the
        data.  Please take a look at the pandas documentation for
        supported types.
    """
    conversion = {"growth": str} if dtype_conversion is None else dtype_conversion
    super(GrowthExperiment, self).load(dtype_conversion=conversion)
    # Normalize the growth column to booleans via the class truthy set.
    self.data["growth"] = self.data["growth"].isin(self.TRUTHY)
def transact(self, contract_method: ContractFunction,):
    """A wrapper around to_be_called.transact() that waits until the
    transaction succeeds.
    """
    txhash = contract_method.transact(self.transaction)
    LOG.debug(f'Sending txHash={encode_hex(txhash)}')
    receipt, _ = check_successful_tx(
        web3=self.web3,
        txid=txhash,
        timeout=self.wait,
    )
    return receipt
def removeChildren ( self , child_ids ) : """Remove children from current workitem : param child _ ids : a : class : ` list ` contains the children workitem id / number ( integer or equivalent string )"""
if not hasattr ( child_ids , "__iter__" ) : error_msg = "Input parameter 'child_ids' is not iterable" self . log . error ( error_msg ) raise exception . BadValue ( error_msg ) self . log . debug ( "Try to remove children <Workitem %s> from current " "<Workitem %s>" , child_ids , self ) self . _removeChildre...
def _get_event_source_status(awsclient, evt_source, lambda_arn):
    """Given an event_source dictionary, create the object and get the
    event source status.
    """
    return _get_event_source_obj(awsclient, evt_source).status(lambda_arn)
def insert ( self , crc , toc ) : """Save a new cache to file"""
if self . _rw_cache : try : filename = '%s/%08X.json' % ( self . _rw_cache , crc ) cache = open ( filename , 'w' ) cache . write ( json . dumps ( toc , indent = 2 , default = self . _encoder ) ) cache . close ( ) logger . info ( 'Saved cache to [%s]' , filename ) self...
def reference(self, referencing_path=None):
    """How to reference this address in a BUILD file.

    :API: public
    """
    # Same directory as the referencer: a relative spec suffices.
    if referencing_path is not None and self._spec_path == referencing_path:
        return self.relative_spec
    # Default-named targets can be referenced by path alone.
    if os.path.basename(self._spec_path) != self._target_name:
        return self.spec
    return self._spec_path
def parse_command_line ( ) : """Parses the command line and returns a ` ` Namespace ` ` object containing options and their values . : return : A ` ` Namespace ` ` object containing options and their values ."""
import argparse parser = argparse . ArgumentParser ( description = __doc__ . split ( "\n" ) [ 0 ] ) parser . add_argument ( '-v' , '--version' , action = 'version' , version = '%(prog)s ' + __version__ , help = "Show version number and exit." ) parser . add_argument ( 'input_filename' , metavar = 'INPUT_FILE' , type = ...
def lub(self, other):
    """Return the least upper bound for given intervals.

    :param other: AbstractInterval instance
    """
    lower_inc = self.lower_inc if self < other else other.lower_inc
    upper_inc = self.upper_inc if self > other else other.upper_inc
    return self.__class__(
        [max(self.lower, other.lower), max(self.upper, other.upper)],
        lower_inc=lower_inc,
        upper_inc=upper_inc,
    )
def create_dir ( self , directory_path , perm_bits = PERM_DEF ) : """Create ` directory _ path ` , and all the parent directories . Helper method to set up your test faster . Args : directory _ path : The full directory path to create . perm _ bits : The permission bits as set by ` chmod ` . Returns : T...
directory_path = self . make_string_path ( directory_path ) directory_path = self . absnormpath ( directory_path ) self . _auto_mount_drive_if_needed ( directory_path ) if self . exists ( directory_path , check_link = True ) : self . raise_os_error ( errno . EEXIST , directory_path ) path_components = self . _path_...
def roundness(self, value):
    """Set the roundness of the vowel.

    :param str value: the value to be set
    """
    # None is accepted as "unset"; any other value must be a known one.
    if value is not None and value not in DG_V_ROUNDNESS:
        raise ValueError("Unrecognized value for roundness: '%s'" % value)
    self.__roundness = value
def parsewarn(self, msg, line=None):
    """Emit parse warning."""
    where = self.sline if line is None else line
    self.dowarn('warning: ' + msg + ' on line {}'.format(where))
def media(self):
    """Access the media.

    :returns: twilio.rest.fax.v1.fax.fax_media.FaxMediaList
    :rtype: twilio.rest.fax.v1.fax.fax_media.FaxMediaList
    """
    # Lazily construct and cache the sub-resource list.
    if self._media is None:
        self._media = FaxMediaList(self._version, fax_sid=self._solution['sid'])
    return self._media
def create_color_stops ( breaks , colors = 'RdYlGn' , color_ramps = color_ramps ) : """Convert a list of breaks into color stops using colors from colorBrewer or a custom list of color values in RGB , RGBA , HSL , CSS text , or HEX format . See www . colorbrewer2 . org for a list of color options to pass"""
num_breaks = len ( breaks ) stops = [ ] if isinstance ( colors , list ) : # Check if colors contain a list of color values if len ( colors ) == 0 or len ( colors ) != num_breaks : raise ValueError ( 'custom color list must be of same length as breaks list' ) for color in colors : # Check if color is val...
def wait_for_event(self, event, timeout=10):
    """Block waiting for the given event.  Returns the event params.

    Pure delegation to the private handler.

    :param event: The event to handle.
    :param timeout: The maximum time to wait before raising
        :exc:`.TimeoutError`.
    :return: The event params.
    """
    return self.__handler.wait_for_event(event, timeout=timeout)
def _compile_mapping ( self , schema , invalid_msg = None ) : """Create validator for given mapping ."""
invalid_msg = invalid_msg or 'mapping value' # Keys that may be required all_required_keys = set ( key for key in schema if key is not Extra and ( ( self . required and not isinstance ( key , ( Optional , Remove ) ) ) or isinstance ( key , Required ) ) ) # Keys that may have defaults all_default_keys = set ( key for ke...
def functions(self):
    """Returns a generator of all standalone functions in the file, in
    textual order.
    """
    def is_function(comment):
        # Standalone means a FunctionDoc that is not a member of a class.
        if not isinstance(comment, FunctionDoc):
            return False
        return not comment.member

    return self._filtered_iter(is_function)
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer."""
    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    already_present = any(
        _normalize_cached(item) == normalized for item in module.__path__
    )
    # Only return the path if it's not already there (None otherwise).
    if not already_present:
        return subpath
def combine_related ( self , return_toplevel = True , poolsize = None , size_cutoff = 100 ) : """Connect related statements based on their refinement relationships . This function takes as a starting point the unique statements ( with duplicates removed ) and returns a modified flat list of statements contain...
if self . related_stmts is not None : if return_toplevel : return self . related_stmts else : assert self . unique_stmts is not None return self . unique_stmts # Call combine _ duplicates , which lazily initializes self . unique _ stmts unique_stmts = self . combine_duplicates ( ) # Gene...
def _preprocess_scan_params ( self , xml_params ) : """Processes the scan parameters ."""
params = { } for param in xml_params : params [ param . tag ] = param . text or '' # Set default values . for key in self . scanner_params : if key not in params : params [ key ] = self . get_scanner_param_default ( key ) if self . get_scanner_param_type ( key ) == 'selection' : para...
def _op_generic_pack_StoU_saturation ( self , args , src_size , dst_size ) : """Generic pack with unsigned saturation . Split args in chunks of src _ size signed bits and in pack them into unsigned saturated chunks of dst _ size bits . Then chunks are concatenated resulting in a BV of len ( args ) * dst _ size ...
if src_size <= 0 or dst_size <= 0 : raise SimOperationError ( "Can't pack from or to zero or negative size" % self . name ) result = None max_value = claripy . BVV ( - 1 , dst_size ) . zero_extend ( src_size - dst_size ) # max value for unsigned saturation min_value = claripy . BVV ( 0 , src_size ) # min unsigned v...
def _prepare_version(self):
    """Setup the application version."""
    # Record the package version unless a value is already configured.
    self._config.setdefault(config.VERSION, __version__)
def cleanup(self):
    '''remove sockets on shutdown'''
    log.debug('ConCache cleaning up')
    # Remove each IPC socket this cache created, if it still exists.
    for sock in (self.cache_sock, self.update_sock, self.upd_t_sock):
        if os.path.exists(sock):
            os.remove(sock)
def function(x, ax, ay):
    '''general square root function'''
    # Silence invalid-value warnings for inputs left of the root (x < ax).
    with np.errstate(invalid='ignore'):
        shifted = x - ax
        return ay * shifted ** 0.5
def _dict_from_lines ( lines , key_nums , sep = None ) : """Helper function to parse formatted text structured like : value1 value2 . . . sep key1 , key2 . . . key _ nums is a list giving the number of keys for each line . 0 if line should be skipped . sep is a string denoting the character that separates the...
if is_string ( lines ) : lines = [ lines ] if not isinstance ( key_nums , collections . abc . Iterable ) : key_nums = list ( key_nums ) if len ( lines ) != len ( key_nums ) : err_msg = "lines = %s\n key_num = %s" % ( str ( lines ) , str ( key_nums ) ) raise ValueError ( err_msg ) kwargs = Namespace ( )...
def connect_to_endpoints_nowait(self, *endpoints: ConnectionConfig) -> None:
    """Connect to the given endpoints as soon as they become available,
    without blocking the caller.
    """
    self._throw_if_already_connected(*endpoints)
    # Schedule each connection attempt on the running event loop.
    for endpoint_config in endpoints:
        asyncio.ensure_future(self._await_connect_to_endpoint(endpoint_config))
def handle_bail(self, bail):
    """Handle a bail line."""
    # Record the bail-out reason as an error on this handler.
    template = _("Bailed: {reason}")
    self._add_error(template.format(reason=bail.reason))
def _local_pauli_eig_meas(op, idx):
    """Generate a gate sequence to measure in the eigenbasis of a Pauli
    operator, assuming we are only able to measure in the Z eigenbasis.

    (Note: The unitary operations of this Program are essentially the
    Hermitian conjugates of those in :py:func:`_one_q_pauli`.)
    """
    # Rotate the requested eigenbasis onto Z; Z itself needs no rotation.
    if op == 'Z':
        return Program()
    if op == 'X':
        return Program(RY(-pi / 2, idx))
    if op == 'Y':
        return Program(RX(pi / 2, idx))
    raise ValueError(f'Unknown operation {op}')
def format_all ( format_string , env ) : """Format the input string using each possible combination of lists in the provided environment . Returns a list of formated strings ."""
prepared_env = parse_pattern ( format_string , env , lambda x , y : [ FormatWrapper ( x , z ) for z in y ] ) # Generate each possible combination , format the string with it and yield # the resulting string : for field_values in product ( * prepared_env . itervalues ( ) ) : format_env = dict ( izip ( prepared_env ....
def parse_user(raw):
    """Parse a nick(!user(@host)?)? structure into (nick, user, host).

    Components that are absent are returned as None.
    """
    nick = raw
    user = None
    host = None
    # Split on the first separator only, so a stray separator character in
    # the trailing component cannot raise an unpacking ValueError.
    # Attempt to extract host.
    if protocol.HOST_SEPARATOR in raw:
        raw, host = raw.split(protocol.HOST_SEPARATOR, 1)
    # Attempt to extract user.
    if protocol.USER_SEPARATOR in raw:
        nick, user = raw.split(protocol.USER_SEPARATOR, 1)
    return nick, user, host
def sniff_iface(f):
    """Ensure the decorated function is called with a value for ``iface``.

    If no iface is provided, inject the net iface inferred from the unit's
    private address.
    """
    import functools

    # functools.wraps preserves the wrapped function's name/docstring so the
    # decorator is transparent to introspection.
    @functools.wraps(f)
    def iface_sniffer(*args, **kwargs):
        if not kwargs.get('iface', None):
            kwargs['iface'] = get_iface_from_addr(unit_get('private-address'))
        return f(*args, **kwargs)

    return iface_sniffer
def request_param_update(self, complete_name):
    """Request an update of the value for the supplied parameter."""
    # Resolve the parameter's TOC element id, then delegate to the updater.
    element_id = self.toc.get_element_id(complete_name)
    self.param_updater.request_param_update(element_id)
def legislators ( request , abbr ) : '''Context : - metadata - chamber - chamber _ title - chamber _ select _ template - chamber _ select _ collection - chamber _ select _ chambers - show _ chamber _ column - abbr - legislators - sort _ order - sort _ key - legislator _ table - nav _ activ...
try : meta = Metadata . get_object ( abbr ) except DoesNotExist : raise Http404 spec = { 'active' : True , 'district' : { '$exists' : True } } chambers = dict ( ( k , v [ 'name' ] ) for k , v in meta [ 'chambers' ] . items ( ) ) chamber = request . GET . get ( 'chamber' , 'both' ) if chamber in chambers : s...
def request(self, location, fragment_enc=False):
    """Given a URL this method will add a fragment, a query part or extend
    a query part if it already exists with the information in this instance.

    :param location: A URL
    :param fragment_enc: Whether the information should be placed in a
        fragment (True) or in a query part (False)
    :return: The extended URL
    """
    base = as_unicode(location)
    encoded = as_unicode(self.to_urlencoded())
    if fragment_enc:
        separator = "#"
    elif "?" in location:
        # A query string is already present; extend it.
        separator = "&"
    else:
        separator = "?"
    return "%s%s%s" % (base, separator, encoded)
def redirect ( to , * args , ** kwargs ) : """Similar to the Django ` ` redirect ` ` shortcut but with altered functionality . If an optional ` ` params ` ` argument is provided , the dictionary items will be injected as query parameters on the redirection URL ."""
params = kwargs . pop ( 'params' , { } ) try : to = reverse ( to , args = args , kwargs = kwargs ) except NoReverseMatch : if '/' not in to and '.' not in to : to = reverse ( 'cas_login' ) elif not service_allowed ( to ) : raise PermissionDenied ( ) if params : to = add_query_params ( to...
def _poll_for_refresh ( self , check_id ) : """Given a Trusted Advisor check _ id that has just been refreshed , poll until the refresh is complete . Once complete , return the check result . : param check _ id : the Trusted Advisor check ID : type check _ id : str : returns : dict check result . The return...
logger . warning ( 'Polling for TA check %s refresh...' , check_id ) if self . refresh_timeout is None : # no timeout . . . cutoff = datetime_now ( ) + timedelta ( days = 365 ) else : cutoff = datetime_now ( ) + timedelta ( seconds = self . refresh_timeout ) last_status = None while datetime_now ( ) <= cutoff :...
def compute_grouped_metric(ungrouped_metric, group_matrix):
    '''Computes the mean value for the groups of parameter values in the
    argument ungrouped_metric.

    ungrouped_metric is a 1-D array of per-parameter values; group_matrix is
    a (num_params, num_groups) membership matrix. Returns a masked array of
    per-group means.
    '''
    # np.bool was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # bool is the correct dtype here.
    group_matrix = np.asarray(group_matrix, dtype=bool)
    # Mask out entries that do not belong to each group. ~group_matrix is
    # the logical complement (equivalent to the old `group_matrix ^ 1`).
    mu_star_masked = np.ma.masked_array(
        ungrouped_metric * group_matrix.T, mask=(~group_matrix).T
    )
    return np.ma.mean(mu_star_masked, axis=1)
def _explode_raster ( raster , band_names = [ ] ) : # type : ( _ Raster , Iterable [ str ] ) - > List [ _ Raster ] """Splits a raster into multiband rasters ."""
# Using band _ names = [ ] does no harm because we are not mutating it in place # and it makes MyPy happy if not band_names : band_names = raster . band_names else : band_names = list ( IndexedSet ( raster . band_names ) . intersection ( band_names ) ) return [ _Raster ( image = raster . bands_data ( [ band_nam...
def daily_pr_intensity ( pr , thresh = '1 mm/day' , freq = 'YS' ) : r"""Average daily precipitation intensity Return the average precipitation over wet days . Parameters pr : xarray . DataArray Daily precipitation [ mm / d or kg / m2 / s ] thresh : str precipitation value over which a day is considered ...
t = utils . convert_units_to ( thresh , pr , 'hydro' ) # put pr = 0 for non wet - days pr_wd = xr . where ( pr >= t , pr , 0 ) pr_wd . attrs [ 'units' ] = pr . units # sum over wanted period s = pr_wd . resample ( time = freq ) . sum ( dim = 'time' , keep_attrs = True ) sd = utils . pint_multiply ( s , 1 * units . day ...
def checkSimbad ( g , target , maxobj = 5 , timeout = 5 ) : """Sends off a request to Simbad to check whether a target is recognised . Returns with a list of results , or raises an exception if it times out"""
url = 'http://simbad.u-strasbg.fr/simbad/sim-script' q = 'set limit ' + str ( maxobj ) + '\nformat object form1 "Target: %IDLIST(1) | %COO(A D;ICRS)"\nquery ' + target query = urllib . parse . urlencode ( { 'submit' : 'submit script' , 'script' : q } ) resp = urllib . request . urlopen ( url , query . encode ( ) , time...
def run(self):
    """Calls the main function of a plugin and mutates the output dict with
    its return value.

    Provides an easy way to change the output whilst not needing to
    constantly poll a queue in another thread, allowing plugins to manage
    their own intervals.
    """
    self.running = True
    # Keep polling until something clears self.running.
    while self.running:
        result = self.func()
        self.output_dict[result['name']] = result
        time.sleep(self.interval)
def writeline(self, data, crlf="\n"):  # pylint: disable=arguments-differ
    """Write data to process.

    Thin delegation to GenericProcess.writeline, forwarding the line
    terminator unchanged.

    :param data: data to write
    :param crlf: line end character
    :return: Nothing
    """
    GenericProcess.writeline(self, data, crlf=crlf)
def proxy_global ( name , no_expand_macro = False , fname = 'func' , args = ( ) ) : """Used to automatically asrootpy ROOT ' s thread local variables"""
if no_expand_macro : # pragma : no cover # handle older ROOT versions without _ ExpandMacroFunction wrapping @ property def gSomething_no_func ( self ) : glob = self ( getattr ( ROOT , name ) ) # create a fake func ( ) that just returns self def func ( ) : return glob ...
def get_item ( self , table_name , key , attributes_to_get = None , consistent_read = False , object_hook = None ) : """Return a set of attributes for an item that matches the supplied key . : type table _ name : str : param table _ name : The name of the table containing the item . : type key : dict : pa...
data = { 'TableName' : table_name , 'Key' : key } if attributes_to_get : data [ 'AttributesToGet' ] = attributes_to_get if consistent_read : data [ 'ConsistentRead' ] = True json_input = json . dumps ( data ) response = self . make_request ( 'GetItem' , json_input , object_hook = object_hook ) if not response ....
def split_file ( splitNum , fileInput , lines ) : """split _ file is used to split fileInput into splitNum small pieces file . For example , when splitNum is 56 , a 112 lines file will be split into 56 files and each file has 2 lines . : param splitNum : split into splitNum files : param fileInput : file to b...
quot = lines // splitNum rema = lines % splitNum files = [ ] current_line = 0 for i in range ( splitNum ) : if i < rema : read_line = quot + 1 else : read_line = quot temp = tempfile . NamedTemporaryFile ( ) os . system ( "head -n%d %s| tail -n%d > %s" % ( current_line + read_line , file...
def quasi_newton_uniform ( points , cells , * args , ** kwargs ) : """Like linear _ solve above , but assuming rho = = 1 . Note that the energy gradient \\ partial E _ i = 2 / ( d + 1 ) sum _ { tau _ j in omega _ i } ( x _ i - b _ j ) \\ int _ { tau _ j } rho becomes \\ partial E _ i = 2 / ( d + 1 ) sum _ { t...
def get_new_points ( mesh ) : # do one Newton step # TODO need copy ? x = mesh . node_coords . copy ( ) cells = mesh . cells [ "nodes" ] jac_x = jac_uniform ( x , cells ) x -= solve_hessian_approx_uniform ( x , cells , jac_x ) return x mesh = MeshTri ( points , cells ) runner ( get_new_points , mesh...