matlok - Python Src Code Datasets (base)
Collection
Python code from leading AI research and tools repositories • 2 items • Updated • 1
name stringlengths 1 94 | class_name stringlengths 1 56 | class_bases stringlengths 0 73 ⌀ | is_member bool 2
classes | args stringlengths 0 1.04k | class_docstr stringlengths 0 13k ⌀ | class_docstr_tok stringlengths 2 18.2k ⌀ | docstr stringlengths 0 6.72k ⌀ | docstr_tok stringlengths 13 9.01k ⌀ | returns stringlengths 1 279 ⌀ | code stringlengths 20 27.8k ⌀ | code_tok stringlengths 32 38.4k ⌀ | lstart int64 1 1.03k | lend int64 2 1.18k | raises stringclasses 9
values | filename stringlengths 5 81 | file_path stringlengths 14 129 | imports stringlengths 0 786 | total_objects int64 7 10 | num_classes float64 1 4 ⌀ | num_imports int64 0 9 | num_all_bases float64 0 4 ⌀ | num_bases float64 1 4 ⌀ | num_functions int64 0 10 | num_methods float64 1 8 ⌀ | label_desc stringlengths 70 1.25k | label_desc_len int64 70 1.25k | label_id stringclasses 10
values | __index_level_0__ int64 0 514k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
test_da_tokenizer_handles_dates | global | null | false | da_tokenizer,text | null | null | null | null | null | def test_da_tokenizer_handles_dates(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
| ["def","test_da_tokenizer_handles_dates","(","da_tokenizer",",","text",")",":","tokens","=","da_tokenizer","(","text",")","assert","len","(","tokens",")","==","1"] | 17 | 19 | null | test_exceptions.py | spacy/spacy/tests/lang/da/test_exceptions.py | import pytest | 7 | null | 1 | null | null | 6 | null | Use image node_id 3 for calling a global function with example usage: test_da_tokenizer_handles_dates(da_tokenizer, text) without return types | 142 | node_id 3 | 387,996 |
test_da_tokenizer_handles_exc_in_text | global | null | false | da_tokenizer | null | null | null | null | null | def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
text = "Det er bl.a. ikke meningen"
tokens = da_tokenizer(text)
assert len(tokens) == 5
assert tokens[2].text == "bl.a."
| ["def","test_da_tokenizer_handles_exc_in_text","(","da_tokenizer",")",":","text","=","``","Det","er","bl.a",".","ikke","meningen","''","tokens","=","da_tokenizer","(","text",")","assert","len","(","tokens",")","==","5","assert","tokens","[","2","]",".text","==","``","bl.a",".","''"] | 22 | 26 | null | test_exceptions.py | spacy/spacy/tests/lang/da/test_exceptions.py | import pytest | 7 | null | 1 | null | null | 6 | null | Use image node_id 4 for calling a global function with example usage: test_da_tokenizer_handles_exc_in_text(da_tokenizer) without return types | 142 | node_id 4 | 387,997 |
test_da_tokenizer_handles_custom_base_exc | global | null | false | da_tokenizer | null | null | null | null | null | def test_da_tokenizer_handles_custom_base_exc(da_tokenizer):
text = "Her er noget du kan kigge i."
tokens = da_tokenizer(text)
assert len(tokens) == 8
assert tokens[6].text == "i"
assert tokens[7].text == "."
| ["def","test_da_tokenizer_handles_custom_base_exc","(","da_tokenizer",")",":","text","=","``","Her","er","noget","du","kan","kigge","i",".","''","tokens","=","da_tokenizer","(","text",")","assert","len","(","tokens",")","==","8","assert","tokens","[","6","]",".text","==","``","i","''","assert","tokens","[","7","]",".te... | 29 | 34 | null | test_exceptions.py | spacy/spacy/tests/lang/da/test_exceptions.py | import pytest | 7 | null | 1 | null | null | 6 | null | Use image node_id 5 for calling a global function with example usage: test_da_tokenizer_handles_custom_base_exc(da_tokenizer) without return types | 146 | node_id 5 | 387,998 |
test_da_tokenizer_slash | global | null | false | da_tokenizer,text,n_tokens | null | null | null | null | null | def test_da_tokenizer_slash(da_tokenizer, text, n_tokens):
tokens = da_tokenizer(text)
assert len(tokens) == n_tokens
| ["def","test_da_tokenizer_slash","(","da_tokenizer",",","text",",","n_tokens",")",":","tokens","=","da_tokenizer","(","text",")","assert","len","(","tokens",")","==","n_tokens"] | 57 | 59 | null | test_exceptions.py | spacy/spacy/tests/lang/da/test_exceptions.py | import pytest | 7 | null | 1 | null | null | 6 | null | Use image node_id 6 for calling a global function with example usage: test_da_tokenizer_slash(da_tokenizer, text, n_tokens) without return types | 144 | node_id 6 | 387,999 |
debug | ExecMerger | Executor | true | self,docs_matrix | null | null | null | null | result | def debug(self, docs_matrix: List[DocumentArray], **kwargs):
self.logger.debug(
f"received doc matrix in exec-merger with length {len(docs_matrix)}."
)
result = DocumentArray()
for docs in zip(*docs_matrix):
traversed_executors = [
doc.tags["traversed-executors"] for doc in ... | ["def","debug","(","self",",","docs_matrix",":","List","[","DocumentArray","]",",","*","*","kwargs",")",":","self.logger.debug","(","f","''","received","doc","matrix","in","exec-merger","with","length","{","len","(","docs_matrix",")","}",".","''",")","result","=","DocumentArray","(",")","for","docs","in","zip","(","*",... | 15 | 36 | null | exec_merger.py | jina/tests/docker_compose/executor-merger/exec_merger.py | from itertools import chain
from typing import Dict, List
from jina import Document, DocumentArray, Executor, requests | 7 | 1 | 3 | 1 | 1 | 0 | 2 | Use image node_id 2 for calling the ExecMerger obj's underlying member method code with example usage: obj.debug(docs_matrix) and returns: result | 145 | node_id 2 | 156,984 |
__init__ | ExecMerger | Executor | true | self | null | null | null | null | ExecMerger | def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
from jina.logging.logger import JinaLogger
self.logger = JinaLogger(self.__class__.__name__)
| ["def","__init__","(","self",",","*","args",",","*","*","kwargs",")",":","super","(",")",".__init__","(","*","args",",","*","*","kwargs",")","from","jina.logging.logger","import","JinaLogger","self.logger","=","JinaLogger","(","self.__class__.__name__",")"] | 8 | 12 | null | exec_merger.py | jina/tests/docker_compose/executor-merger/exec_merger.py | from itertools import chain
from typing import Dict, List
from jina import Document, DocumentArray, Executor, requests | 7 | 1 | 3 | 1 | 1 | 0 | 2 | Use image node_id 1 to create a new ExecMerger object from inherited base classes: Executor with example: obj = ExecMerger() | 124 | node_id 1 | 156,983 |
_qencode | global | null | false | s | null | null | null | null | enc | def _qencode(s):
enc = _encodestring(s, quotetabs=True)
# Must encode spaces, which quopri.encodestring() doesn't do
return enc.replace(b" ", b"=20")
| ["def","_qencode","(","s",")",":","enc","=","_encodestring","(","s",",","quotetabs=True",")","#","Must","encode","spaces",",","which","quopri.encodestring","(",")","does","n't","do","return","enc.replace","(","b","''","``",",","b","''","=20","''",")"] | 19 | 22 | null | encoders.py | cpython/Lib/email/encoders.py | from base64 import encodebytes
from quopri import encodestring | 7 | null | 2 | null | null | 5 | null | Use image node_id 1 for calling a global function with example usage: _qencode(s) and returns: enc | 98 | node_id 1 | 52,444 |
run | global | null | false | cmd,stdout | null | null | null | null | null | def run(cmd, stdout=True):
pipe = None if stdout else subprocess.DEVNULL
try:
subprocess.check_call(cmd, stdout=pipe, stderr=pipe)
except subprocess.CalledProcessError:
print("\n! Error executing: `%s;` aborting" % " ".join(cmd))
sys.exit(1)
| ["def","run","(","cmd",",","stdout=True",")",":","pipe","=","None","if","stdout","else","subprocess.DEVNULL","try",":","subprocess.check_call","(","cmd",",","stdout=pipe",",","stderr=pipe",")","except","subprocess.CalledProcessError",":","print","(","``","\\n","!","Error","executing",":","`","%","s",";","`","aborting",... | 42 | 48 | null | push_docs_to_repo.py | numpy/tools/ci/push_docs_to_repo.py | import argparse
import subprocess
import tempfile
import os
import sys
import shutil | 7 | null | 6 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: run(cmd, stdout) without return types | 107 | node_id 1 | 214,130 |
intlist | global | null | false | obj | null | null | null | null | unknown | def intlist(obj):
return [int(v) for v in obj]
| ["def","intlist","(","obj",")",":","return","[","int","(","v",")","for","v","in","obj","]"] | 14 | 15 | null | test_bloom.py | redis-py/tests/test_asyncio/test_bloom.py | from math import inf
import pytest
import redis.asyncio
from redis.exceptions import ModuleError, RedisError
from redis.utils import HIREDIS_AVAILABLE
from tests.conftest import assert_resp_response, is_resp2_connection, skip_ifmodversion_lt | 7 | null | 6 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: intlist(obj) and returns: unknown | 103 | node_id 1 | 339,428 |
create_item | global | null | false | item | null | null | null | null | item | def create_item(item: Item):
return item
| ["def","create_item","(","item",":","Item",")",":","return","item"] | 16 | 17 | null | tutorial001.py | fastapi/docs_src/separate_openapi_schemas/tutorial001.py | from typing import List, Union
from fastapi import FastAPI
from pydantic import BaseModel | 7 | null | 3 | null | null | 2 | null | Use image node_id 1 for calling a global function with example usage: create_item(item) and returns: item | 105 | node_id 1 | 135,814 |
parse_args | global | null | false | null | null | null | null | parser | def parse_args():
"""
Helper function parsing the command line options
@retval ArgumentParser
"""
parser = ArgumentParser(
description=(
"PyTorch TPU distributed training launch "
"helper utility that will spawn up "
"multiple distributed processes"
... | ["def","parse_args","(",")",":","``","''","''","Helper","function","parsing","the","command","line","options","@","retval","ArgumentParser","``","''","''","parser","=","ArgumentParser","(","description=","(","``","PyTorch","TPU","distributed","training","launch","``","``","helper","utility","that","will","spawn","up","... | 36 | 67 | null | xla_spawn.py | accelerate/tests/xla_spawn.py | import importlib
import sys
from argparse import REMAINDER, ArgumentParser
from pathlib import Path
import torch_xla.distributed.xla_multiprocessing | 7 | null | 5 | null | null | 2 | null | Use image node_id 1 for calling a global function with example usage: parse_args() and returns: parser | 102 | node_id 1 | 1,215 | |
test_suite | global | null | false | null | null | null | null | unittest | def test_suite():
return unittest.TestLoader().loadTestsFromName(__name__)
| ["def","test_suite","(",")",":","return","unittest.TestLoader","(",")",".loadTestsFromName","(","__name__",")"] | 43 | 44 | null | test_bugX000.py | psycopg2/tests/test_bugX000.py | import psycopg2
import time
import unittest | 7 | null | 3 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: test_suite() and returns: unittest | 104 | node_id 1 | 256,395 | |
test_date_time_allocation_bug | DateTimeAllocationBugTestCase | unittest | true | self | null | null | null | null | null | def test_date_time_allocation_bug(self):
d1 = psycopg2.Date(2002, 12, 25)
d2 = psycopg2.DateFromTicks(
time.mktime((2002, 12, 25, 0, 0, 0, 0, 0, 0))
)
t1 = psycopg2.Time(13, 45, 30)
t2 = psycopg2.TimeFromTicks(
time.mktime((2001, 1, 1, 13, 45, 30, 0, 0, 0))
)
t1 = psycopg2.Ti... | ["def","test_date_time_allocation_bug","(","self",")",":","d1","=","psycopg2.Date","(","2002",",","12",",","25",")","d2","=","psycopg2.DateFromTicks","(","time.mktime","(","(","2002",",","12",",","25",",","0",",","0",",","0",",","0",",","0",",","0",")",")",")","t1","=","psycopg2.Time","(","13",",","45",",","30",")","t2... | 32 | 40 | null | test_bugX000.py | psycopg2/tests/test_bugX000.py | import psycopg2
import time
import unittest | 7 | 1 | 3 | 1 | 1 | 1 | 1 | Use image node_id 1 for calling the DateTimeAllocationBugTestCase obj's underlying member method code with example usage: obj.test_date_time_allocation_bug() without return types | 178 | node_id 1 | 256,394 |
make_ax | global | null | false | grid | null | null | null | null | ax | def make_ax(grid=False):
ax = plt.figure().add_subplot(projection="3d")
ax.grid(grid)
ax.set_axis_off()
return ax
| ["def","make_ax","(","grid=False",")",":","ax","=","plt.figure","(",")",".add_subplot","(","projection=","''","3d","''",")","ax.grid","(","grid",")","ax.set_axis_off","(",")","return","ax"] | 75 | 79 | null | plot_euler_number.py | scikit-image/doc/examples/segmentation/plot_euler_number.py | from skimage.measure import euler_number, label
import matplotlib.pyplot
import numpy | 7 | null | 3 | null | null | 4 | null | Use image node_id 1 for calling a global function with example usage: make_ax(grid) and returns: ax | 99 | node_id 1 | 344,193 |
explode | global | null | false | data | null | null | null | null | data_e | def explode(data):
"""visualization to separate voxels
Data voxels are separated by 0-valued ones so that they appear
separated in the matplotlib figure.
"""
size = np.array(data.shape) * 2
data_e = np.zeros(size - 1, dtype=data.dtype)
data_e[::2, ::2, ::2] = data
return data_e
| ["def","explode","(","data",")",":","``","''","''","visualization","to","separate","voxels","Data","voxels","are","separated","by","0-valued","ones","so","that","they","appear","separated","in","the","matplotlib","figure.","``","''","''","size","=","np.array","(","data.shape",")","*","2","data_e","=","np.zeros","(","si... | 82 | 91 | null | plot_euler_number.py | scikit-image/doc/examples/segmentation/plot_euler_number.py | from skimage.measure import euler_number, label
import matplotlib.pyplot
import numpy | 7 | null | 3 | null | null | 4 | null | Use image node_id 2 for calling a global function with example usage: explode(data) and returns: data_e | 103 | node_id 2 | 344,194 |
cli | global | null | false | null | null | null | null | null | def cli():
"""Commandline interface."""
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument(
"--text", type=str, help="text to be turned into audio"
)
parser.add_argument(
"--output_filename",
type=str,
... | ["def","cli","(",")",":","``","''","''","Commandline","interface",".","''","''","''","parser","=","argparse.ArgumentParser","(","formatter_class=argparse.ArgumentDefaultsHelpFormatter",")","parser.add_argument","(","``","--","text","''",",","type=str",",","help=","''","text","to","be","turned","into","audio","''",")","... | 10 | 71 | null | cli.py | ai-audio-speech/bark/cli.py | import argparse
from typing import Dict, Optional, Union
import os
from scipy.io.wavfile import write
from .api import generate_audio
from .generation import SAMPLE_RATE | 7 | null | 6 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: cli() without return types | 96 | node_id 1 | 1,222 | |
fake_decode_token | global | null | false | token | null | null | null | null | User | def fake_decode_token(token):
return User(
username=token + "fakedecoded",
email="john@example.com",
full_name="John Doe",
)
| ["def","fake_decode_token","(","token",")",":","return","User","(","username=token","+","``","fakedecoded","''",",","email=","''","john","@","example.com","''",",","full_name=","''","John","Doe","''",",",")"] | 19 | 22 | null | tutorial002_an_py310.py | fastapi/docs_src/security/tutorial002_an_py310.py | from typing import Annotated
from fastapi import Depends, FastAPI
from fastapi.security import OAuth2PasswordBearer
from pydantic import BaseModel | 7 | null | 4 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: fake_decode_token(token) and returns: User | 112 | node_id 1 | 135,732 |
expand_coordinates | global | null | false | indices | null | null | null | null | x, y, z | def expand_coordinates(indices):
"""
This collapses together pairs of indices, so that
the gaps in the volume array will have a zero width.
"""
x, y, z = indices
x[1::2, :, :] += 1
y[:, 1::2, :] += 1
z[:, :, 1::2] += 1
return x, y, z
| ["def","expand_coordinates","(","indices",")",":","``","''","''","This","collapses","together","pairs","of","indices",",","so","that","the","gaps","in","the","volume","array","will","have","a","zero","width.","``","''","''","x",",","y",",","z","=","indices","x","[","1",":",":2",",",":",",",":","]","+=","1","y","[",":",... | 97 | 106 | null | plot_euler_number.py | scikit-image/doc/examples/segmentation/plot_euler_number.py | from skimage.measure import euler_number, label
import matplotlib.pyplot
import numpy | 7 | null | 3 | null | null | 4 | null | Use image node_id 3 for calling a global function with example usage: expand_coordinates(indices) and returns: x, y, z | 120 | node_id 3 | 344,195 |
prepare_tp_fused_qkvw | global | null | false | module,src,mp_size,gpu_index | null | null | null | null | _bloom_type_transpose,tp_fuseqkv_weight,split_fusedqkv,split_fusedqkv,_glm_type_transpose,torch,_bloom_type_transpose,_transpose_fused_qkvw,_codegen_type_transpose,_glm_type_transpose,_qwen_type_transpose,_bigcode_type_transpose | def prepare_tp_fused_qkvw(module, src, mp_size, gpu_index):
module_str = str(module).strip()
if src is None:
return
fused_type_dict = {
"CodeGenBlock": "codegentype",
"BloomBlock": "bloomtype",
"GLMBlock": "glmtype",
"MPTBlock": "glmtype",
"MptBlock": "glmtype... | ["def","prepare_tp_fused_qkvw","(","module",",","src",",","mp_size",",","gpu_index",")",":","module_str","=","str","(","module",")",".strip","(",")","if","src","is","None",":","return","fused_type_dict","=","{","``","CodeGenBlock","''",":","``","codegentype","''",",","``","BloomBlock","''",":","``","bloomtype","''",","... | 30 | 122 | null | fusedqkv_utils.py | deepspeed/deepspeed/module_inject/fusedqkv_utils.py | import torch
from deepspeed.utils.logging import warning_once
from deepspeed.module_inject.tp_shard import get_shard_size, get_shard_size_list, get_num_kv_heads, get_n_embd
import re | 7 | null | 4 | null | null | 3 | null | Use image node_id 3 for calling a global function with example usage: prepare_tp_fused_qkvw(module, src, mp_size, gpu_index) and returns: _bloom_type_transpose, tp_fuseqkv_weight, split_fusedqkv, split_fusedqkv, _glm_type_transpose, torch, _bloom_type_transpose, _transpose_fused_qkvw, _codegen_type_transpose, _glm_type... | 377 | node_id 3 | 102,805 |
test_en_lex_attrs_capitals | global | null | false | word | null | null | null | null | null | def test_en_lex_attrs_capitals(word):
assert like_num(word)
assert like_num(word.upper())
| ["def","test_en_lex_attrs_capitals","(","word",")",":","assert","like_num","(","word",")","assert","like_num","(","word.upper","(",")",")"] | 68 | 70 | null | test_text.py | spacy/spacy/tests/lang/en/test_text.py | import pytest
from spacy.lang.en.lex_attrs import like_num | 7 | null | 2 | null | null | 5 | null | Use image node_id 5 for calling a global function with example usage: test_en_lex_attrs_capitals(word) without return types | 123 | node_id 5 | 388,124 |
test_en_lex_attrs_like_number_for_ordinal | global | null | false | word | null | null | null | null | null | def test_en_lex_attrs_like_number_for_ordinal(word):
assert like_num(word)
| ["def","test_en_lex_attrs_like_number_for_ordinal","(","word",")",":","assert","like_num","(","word",")"] | 63 | 64 | null | test_text.py | spacy/spacy/tests/lang/en/test_text.py | import pytest
from spacy.lang.en.lex_attrs import like_num | 7 | null | 2 | null | null | 5 | null | Use image node_id 4 for calling a global function with example usage: test_en_lex_attrs_like_number_for_ordinal(word) without return types | 138 | node_id 4 | 388,123 |
test_lex_attrs_like_number | global | null | false | en_tokenizer,text,match | null | null | null | null | null | def test_lex_attrs_like_number(en_tokenizer, text, match):
tokens = en_tokenizer(text)
assert len(tokens) == 1
assert tokens[0].like_num == match
| ["def","test_lex_attrs_like_number","(","en_tokenizer",",","text",",","match",")",":","tokens","=","en_tokenizer","(","text",")","assert","len","(","tokens",")","==","1","assert","tokens","[","0","]",".like_num","==","match"] | 54 | 57 | null | test_text.py | spacy/spacy/tests/lang/en/test_text.py | import pytest
from spacy.lang.en.lex_attrs import like_num | 7 | null | 2 | null | null | 5 | null | Use image node_id 3 for calling a global function with example usage: test_lex_attrs_like_number(en_tokenizer, text, match) without return types | 144 | node_id 3 | 388,122 |
test_en_tokenizer_handles_cnts | global | null | false | en_tokenizer,text,length | null | null | null | null | null | def test_en_tokenizer_handles_cnts(en_tokenizer, text, length):
tokens = en_tokenizer(text)
assert len(tokens) == length
| ["def","test_en_tokenizer_handles_cnts","(","en_tokenizer",",","text",",","length",")",":","tokens","=","en_tokenizer","(","text",")","assert","len","(","tokens",")","==","length"] | 33 | 35 | null | test_text.py | spacy/spacy/tests/lang/en/test_text.py | import pytest
from spacy.lang.en.lex_attrs import like_num | 7 | null | 2 | null | null | 5 | null | Use image node_id 2 for calling a global function with example usage: test_en_tokenizer_handles_cnts(en_tokenizer, text, length) without return types | 149 | node_id 2 | 388,121 |
test_en_tokenizer_handles_long_text | global | null | false | en_tokenizer | null | null | null | null | null | def test_en_tokenizer_handles_long_text(en_tokenizer):
text = """Tributes pour in for late British Labour Party leader
Tributes poured in from around the world Thursday
to the late Labour Party leader John Smith, who died earlier from a massive
heart attack aged 55.
In Washington, the US State Department issued a... | ["def","test_en_tokenizer_handles_long_text","(","en_tokenizer",")",":","text","=","``","''","''","Tributes","pour","in","for","late","British","Labour","Party","leader","Tributes","poured","in","from","around","the","world","Thursday","to","the","late","Labour","Party","leader","John","Smith",",","who","died","earlier... | 6 | 18 | null | test_text.py | spacy/spacy/tests/lang/en/test_text.py | import pytest
from spacy.lang.en.lex_attrs import like_num | 7 | null | 2 | null | null | 5 | null | Use image node_id 1 for calling a global function with example usage: test_en_tokenizer_handles_long_text(en_tokenizer) without return types | 140 | node_id 1 | 388,120 |
display_voxels | global | null | false | volume | null | null | null | null | null | def display_voxels(volume):
"""
volume: (N,M,P) array
Represents a binary set of pixels: objects are marked with 1,
complementary (porosities) with 0.
The voxels are actually represented with blue transparent surfaces.
Inner porosities are represented in red.
"""
# defi... | ["def","display_voxels","(","volume",")",":","``","''","''","volume",":","(","N",",","M",",","P",")","array","Represents","a","binary","set","of","pixels",":","objects","are","marked","with","1",",","complementary","(","porosities",")","with","0",".","The","voxels","are","actually","represented","with","blue","transpar... | 109 | 139 | null | plot_euler_number.py | scikit-image/doc/examples/segmentation/plot_euler_number.py | from skimage.measure import euler_number, label
import matplotlib.pyplot
import numpy | 7 | null | 3 | null | null | 4 | null | Use image node_id 4 for calling a global function with example usage: display_voxels(volume) without return types | 113 | node_id 4 | 344,196 |
__init__ | C2SimpleNet | null | true | self,op_name,num_inputs,debug | This module constructs a net with 'op_name' operator. The net consist
a series of such operator.
It initializes the workspace with input blob equal to the number of parameters
needed for the op.
Provides forward method to run the net niter times. | ["This","module","constructs","a","net","with","'op_name","'","operator",".","The","net","consist","a","series","of","such","operator",".","It","initializes","the","workspace","with","input","blob","equal","to","the","number","of","parameters","needed","for","the","op",".","Provides","forward","method","to","run","the"... | null | null | C2SimpleNet | def __init__(self, op_name, num_inputs=1, debug=False):
self.input_names = []
self.net = core.Net("framework_benchmark_net")
self.input_names = [f"in_{i}" for i in range(num_inputs)]
for i in range(num_inputs):
add_blob(workspace, self.input_names[i], [1])
self.net.AddExternalInputs(self.inp... | ["def","__init__","(","self",",","op_name",",","num_inputs=1",",","debug=False",")",":","self.input_names","=","[","]","self.net","=","core.Net","(","``","framework_benchmark_net","''",")","self.input_names","=","[","f","''","in_","{","i","}","''","for","i","in","range","(","num_inputs",")","]","for","i","in","range","... | 23 | 41 | null | C2Module.py | pytorch/benchmarks/framework_overhead_benchmark/C2Module.py | import numpy
from caffe2.python import core, workspace
from utils import NUM_LOOP_ITERS | 7 | 1 | 3 | 0 | null | 1 | 2 | Use image node_id 1 to create a new C2SimpleNet object with example: obj = C2SimpleNet(op_name, num_inputs, debug) | 115 | node_id 1 | 271,494 |
__init__ | Retry | null | true | self,backoff,retries,supported_errors | Retry a specific number of times after a failure | ["Retry","a","specific","number","of","times","after","a","failure"] | Initialize a `Retry` object with a `Backoff` object
that retries a maximum of `retries` times.
`retries` can be negative to retry forever.
You can specify the types of supported errors which trigger
a retry with the `supported_errors` parameter. | ["Initialize","a","`","Retry","`","object","with","a","`","Backoff","`","object","that","retries","a","maximum","of","`","retries","`","times",".","`","retries","`","can","be","negative","to","retry","forever",".","You","can","specify","the","types","of","supported","errors","which","trigger","a","retry","with","the","... | Retry | def __init__(
self,
backoff,
retries,
supported_errors=(ConnectionError, TimeoutError, socket.timeout),
):
"""
Initialize a `Retry` object with a `Backoff` object
that retries a maximum of `retries` times.
`retries` can be negative to retry forever.
You can specify the types of suppo... | ["def","__init__","(","self",",","backoff",",","retries",",","supported_errors=","(","ConnectionError",",","TimeoutError",",","socket.timeout",")",",",")",":","``","''","''","Initialize","a","`","Retry","`","object","with","a","`","Backoff","`","object","that","retries","a","maximum","of","`","retries","`","times",".",... | 10 | 25 | null | retry.py | redis-py/redis/retry.py | import socket
from time import sleep
from redis.exceptions import ConnectionError, TimeoutError | 7 | 1 | 3 | 0 | null | 0 | 3 | Use image node_id 1 to create a new Retry object with example: obj = Retry(backoff, retries, supported_errors) | 111 | node_id 1 | 339,330 |
test_polygon_area | global | null | false | null | null | null | null | null | def test_polygon_area():
x = [0, 0, 1, 1]
y = [0, 1, 1, 0]
assert_almost_equal(polygon_area(y, x), 1)
x = [0, 0, 1]
y = [0, 1, 1]
assert_almost_equal(polygon_area(y, x), 0.5)
x = [0, 0, 0.5, 1, 1, 0.5]
y = [0, 1, 0.5, 1, 0, 0.5]
assert_almost_equal(polygon_area(y, x), 0.5)
| ["def","test_polygon_area","(",")",":","x","=","[","0",",","0",",","1",",","1","]","y","=","[","0",",","1",",","1",",","0","]","assert_almost_equal","(","polygon_area","(","y",",","x",")",",","1",")","x","=","[","0",",","0",",","1","]","y","=","[","0",",","1",",","1","]","assert_almost_equal","(","polygon_area","(","y"... | 38 | 52 | null | test_geometry.py | scikit-image/skimage/_shared/tests/test_geometry.py | import pytest
from skimage._shared._geometry import polygon_clip, polygon_area
import numpy
from numpy.testing import assert_equal, assert_almost_equal | 7 | null | 4 | null | null | 3 | null | Use image node_id 1 for calling a global function with example usage: test_polygon_area() without return types | 110 | node_id 1 | 344,274 | |
test_return_sequences | Seq2seqTrainerTester | TestCasePlus | true | self | null | null | null | null | model_inputs | def test_return_sequences(self):
# Tests that the number of generated sequences is correct when num_return_sequences > 1
# and essentially ensuring that `accelerator.gather()` is used instead of `gather_for_metrics`
INPUT_COLUMN = "question"
TARGET_COLUMN = "answer"
MAX_INPUT_LENGTH = 256
MAX_TA... | ["def","test_return_sequences","(","self",")",":","#","Tests","that","the","number","of","generated","sequences","is","correct","when","num_return_sequences",">","1","#","and","essentially","ensuring","that","`","accelerator.gather","(",")","`","is","used","instead","of","`","gather_for_metrics","`","INPUT_COLUMN","=",... | 138 | 183 | null | test_trainer_seq2seq.py | transformers/tests/trainer/test_trainer_seq2seq.py | from transformers import AutoModelForSeq2SeqLM, BertTokenizer, DataCollatorForSeq2Seq, EncoderDecoderModel, GenerationConfig, Seq2SeqTrainer, Seq2SeqTrainingArguments, T5Tokenizer
from transformers.testing_utils import TestCasePlus, require_torch, slow
from transformers.utils import is_datasets_available | 7 | 1 | 3 | 1 | 1 | 0 | 2 | Use image node_id 2 for calling the Seq2seqTrainerTester obj's underlying member method code with example usage: obj.test_return_sequences() and returns: model_inputs | 166 | node_id 2 | 486,561 |
main | global | null | false | null | null | null | null | null | def main():
bench("eager ", add1)
bench("compiled", torch.compile(add1))
| ["def","main","(",")",":","bench","(","``","eager","``",",","add1",")","bench","(","``","compiled","''",",","torch.compile","(","add1",")",")"] | 24 | 26 | null | overheads.py | pytorch/benchmarks/dynamo/microbenchmarks/overheads.py | import time
import timeit
import numpy
import torch | 7 | null | 4 | null | null | 3 | null | Use image node_id 3 for calling a global function with example usage: main() without return types | 97 | node_id 3 | 271,273 | |
test_hooks_unpickle | global | null | false | null | null | null | null | str | def test_hooks_unpickle():
def inner_func(d1, d2):
return "hello!"
nlp = Language()
doc = nlp("Hello")
doc.user_hooks["similarity"] = inner_func
b = pickle.dumps(doc)
doc2 = pickle.loads(b)
assert doc2.similarity(None) == "hello!"
| ["def","test_hooks_unpickle","(",")",":","def","inner_func","(","d1",",","d2",")",":","return","``","hello","!","''","nlp","=","Language","(",")","doc","=","nlp","(","``","Hello","''",")","doc.user_hooks","[","``","similarity","''","]","=","inner_func","b","=","pickle.dumps","(","doc",")","doc2","=","pickle.loads","(",... | 45 | 54 | null | test_pickle_doc.py | spacy/spacy/tests/doc/test_pickle_doc.py | from spacy.compat import pickle
from spacy.language import Language | 7 | null | 2 | null | null | 5 | null | Use image node_id 5 for calling a global function with example usage: test_hooks_unpickle() and returns: str | 108 | node_id 5 | 387,845 | |
tir_call | global | null | false | ib,extern,name | null | null | null | null | buf,tvm,tvm | def tir_call(ib: tvm.tir.ir_builder, extern: bool, name: str, *args):
"""
ib: ir_builder
extern: bool
True --> tvm.tir.call_extern
False --> tvm.tir.call_packed
name: str
function name
*args:
arguments for function call
"""
def buf_from_array(ib, arr, dtype)... | ["def","tir_call","(","ib",":","tvm.tir.ir_builder",",","extern",":","bool",",","name",":","str",",","*","args",")",":","``","''","''","ib",":","ir_builder","extern",":","bool","True","--",">","tvm.tir.call_extern","False","--",">","tvm.tir.call_packed","name",":","str","function","name","*","args",":","arguments","for... | 105 | 136 | null | passes.py | tvm/apps/uma/_template/passes.py | import tvm
from tvm import tir
from tvm.relay.backend.contrib.uma.api.utils import add_llvm_to_block | 7 | null | 3 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: tir_call(ib, extern, name) and returns: buf, tvm, tvm | 123 | node_id 1 | 487,914 |
_my_ai_hw_conv2d_pass | MyAiHwConv2dPass | null | true | cls,func,mod,ctx | null | null | null | null | r,unknown,op,func,func,irb_result,op | def _my_ai_hw_conv2d_pass(cls, func, mod, ctx):
_loops = dict()
_handles = []
_entry_node = None
def _has_block(name: str, func: tvm.tir.PrimFunc) -> bool:
"""
Determine of a tir.block with `name` exists in `func`
"""
def _hb(op):
if isinstance(op, tvm.tir.B... | ["def","_my_ai_hw_conv2d_pass","(","cls",",","func",",","mod",",","ctx",")",":","_loops","=","dict","(",")","_handles","=","[","]","_entry_node","=","None","def","_has_block","(","name",":","str",",","func",":","tvm.tir.PrimFunc",")","-",">","bool",":","``","''","''","Determine","of","a","tir.block","with","`","name","... | 35 | 102 | null | passes.py | tvm/apps/uma/_template/passes.py | import tvm
from tvm import tir
from tvm.relay.backend.contrib.uma.api.utils import add_llvm_to_block | 7 | 1 | 3 | 0 | null | 1 | 2 | Use image node_id 2 for calling the MyAiHwConv2dPass obj's underlying member method code with example usage: obj._my_ai_hw_conv2d_pass(cls, func, mod, ctx) and returns: r, unknown, op, func, func, irb_result, op | 211 | node_id 2 | 487,913 |
transform_function | MyAiHwConv2dPass | null | true | self,func,mod,ctx | null | null | null | null | self | def transform_function(
self,
func: tvm.tir.PrimFunc,
mod: tvm.ir.IRModule,
ctx: tvm.ir.transform.PassContext,
) -> tvm.tir.PrimFunc:
return self._my_ai_hw_conv2d_pass(func, mod, ctx)
| ["def","transform_function","(","self",",","func",":","tvm.tir.PrimFunc",",","mod",":","tvm.ir.IRModule",",","ctx",":","tvm.ir.transform.PassContext",",",")","-",">","tvm.tir.PrimFunc",":","return","self._my_ai_hw_conv2d_pass","(","func",",","mod",",","ctx",")"] | 29 | 32 | null | passes.py | tvm/apps/uma/_template/passes.py | import tvm
from tvm import tir
from tvm.relay.backend.contrib.uma.api.utils import add_llvm_to_block | 7 | 1 | 3 | 0 | null | 1 | 2 | Use image node_id 1 for calling the MyAiHwConv2dPass obj's underlying member method code with example usage: obj.transform_function(func, mod, ctx) and returns: self | 165 | node_id 1 | 487,912 |
pytest_cmdline_preparse | global | null | false | config,args | null | null | null | null | null | def pytest_cmdline_preparse(config, args):
if "--unskip-missing-tokenizer" not in args:
return
global unskip_missing_tokenizer
unskip_missing_tokenizer = True
| ["def","pytest_cmdline_preparse","(","config",",","args",")",":","if","``","--","unskip-missing-tokenizer","''","not","in","args",":","return","global","unskip_missing_tokenizer","unskip_missing_tokenizer","=","True"] | 46 | 50 | null | conftest.py | llama-recipes/tests/conftest.py | import pytest
from transformers import LlamaTokenizer | 7 | null | 2 | null | null | 5 | null | Use image node_id 5 for calling a global function with example usage: pytest_cmdline_preparse(config, args) without return types | 128 | node_id 5 | 166,997 |
downgrade | global | null | false | null | null | null | null | null | def downgrade():
bind = op.get_bind()
session = Session(bind=bind)
# Add the old permissions on the migration itself
add_pvms(session, get_reversed_new_pvms(PVM_MAP))
migrate_roles(session, get_reversed_pvm_map(PVM_MAP))
try:
session.commit()
except SQLAlchemyError as ex:
pr... | ["def","downgrade","(",")",":","bind","=","op.get_bind","(",")","session","=","Session","(","bind=bind",")","#","Add","the","old","permissions","on","the","migration","itself","add_pvms","(","session",",","get_reversed_new_pvms","(","PVM_MAP",")",")","migrate_roles","(","session",",","get_reversed_pvm_map","(","PVM_MAP... | 96 | 108 | null | 2020-12-14_10-49_42b4c9e01447_security_converge_databases.py | superset/superset/migrations/versions/2020-12-14_10-49_42b4c9e01447_security_converge_databases.py | import sqlalchemy
from alembic import op
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session
from superset.migrations.shared.security_converge import add_pvms, get_reversed_new_pvms, get_reversed_pvm_map, migrate_roles, Pvm | 7 | null | 5 | null | null | 2 | null | Use image node_id 2 for calling a global function with example usage: downgrade() without return types | 102 | node_id 2 | 437,212 | |
test_dtype | global | null | false | null | null | null | null | null | def test_dtype():
dtype = tvm.DataType("handle")
assert dtype.type_code == tvm.DataTypeCode.HANDLE
| ["def","test_dtype","(",")",":","dtype","=","tvm.DataType","(","``","handle","''",")","assert","dtype.type_code","==","tvm.DataTypeCode.HANDLE"] | 63 | 65 | null | test_runtime_ndarray.py | tvm/tests/python/all-platform-minimal-test/test_runtime_ndarray.py | import tvm
from tvm import te
import numpy
import tvm.testing | 7 | null | 4 | null | null | 3 | null | Use image node_id 3 for calling a global function with example usage: test_dtype() without return types | 103 | node_id 3 | 498,113 | |
upgrade | global | null | false | null | null | null | null | null | def upgrade():
bind = op.get_bind()
session = Session(bind=bind)
# Add the new permissions on the migration itself
add_pvms(session, NEW_PVMS)
migrate_roles(session, PVM_MAP)
try:
session.commit()
except SQLAlchemyError as ex:
print(f"An error occurred while upgrading permis... | ["def","upgrade","(",")",":","bind","=","op.get_bind","(",")","session","=","Session","(","bind=bind",")","#","Add","the","new","permissions","on","the","migration","itself","add_pvms","(","session",",","NEW_PVMS",")","migrate_roles","(","session",",","PVM_MAP",")","try",":","session.commit","(",")","except","SQLAlchem... | 82 | 93 | null | 2020-12-14_10-49_42b4c9e01447_security_converge_databases.py | superset/superset/migrations/versions/2020-12-14_10-49_42b4c9e01447_security_converge_databases.py | import sqlalchemy
from alembic import op
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session
from superset.migrations.shared.security_converge import add_pvms, get_reversed_new_pvms, get_reversed_pvm_map, migrate_roles, Pvm | 7 | null | 5 | null | null | 2 | null | Use image node_id 1 for calling a global function with example usage: upgrade() without return types | 100 | node_id 1 | 437,211 | |
test_pod_log_reader_rolebinding | TestPodReader | null | true | self,triggerer,webserver,expected | Tests RBAC Pod Reader. | ["Tests","RBAC","Pod","Reader","."] | null | null | null | def test_pod_log_reader_rolebinding(
self, triggerer, webserver, expected
):
docs = render_chart(
values={
"triggerer": {"enabled": triggerer},
"webserver": {"allowPodLogReading": webserver},
},
show_only=["templates/rbac/pod-log-reader-rolebinding.yaml"],
)
... | ["def","test_pod_log_reader_rolebinding","(","self",",","triggerer",",","webserver",",","expected",")",":","docs","=","render_chart","(","values=","{","``","triggerer","''",":","{","``","enabled","''",":","triggerer","}",",","``","webserver","''",":","{","``","allowPodLogReading","''",":","webserver","}",",","}",",","s... | 37 | 46 | null | test_rbac_pod_log_reader.py | airflow/helm_tests/security/test_rbac_pod_log_reader.py | from __future__ import annotations
import jmespath
import pytest
from tests.charts.helm_template_generator import render_chart | 7 | 1 | 4 | 0 | null | 0 | 2 | Use image node_id 1 for calling the TestPodReader obj's underlying member method code with example usage: obj.test_pod_log_reader_rolebinding(triggerer, webserver, expected) without return types | 194 | node_id 1 | 13,850 |
test_pod_log_reader_role | TestPodReader | null | true | self,triggerer,webserver,expected | Tests RBAC Pod Reader. | ["Tests","RBAC","Pod","Reader","."] | null | null | null | def test_pod_log_reader_role(self, triggerer, webserver, expected):
docs = render_chart(
values={
"triggerer": {"enabled": triggerer},
"webserver": {"allowPodLogReading": webserver},
},
show_only=["templates/rbac/pod-log-reader-role.yaml"],
)
actual = (
... | ["def","test_pod_log_reader_role","(","self",",","triggerer",",","webserver",",","expected",")",":","docs","=","render_chart","(","values=","{","``","triggerer","''",":","{","``","enabled","''",":","triggerer","}",",","``","webserver","''",":","{","``","allowPodLogReading","''",":","webserver","}",",","}",",","show_onl... | 57 | 66 | null | test_rbac_pod_log_reader.py | airflow/helm_tests/security/test_rbac_pod_log_reader.py | from __future__ import annotations
import jmespath
import pytest
from tests.charts.helm_template_generator import render_chart | 7 | 1 | 4 | 0 | null | 0 | 2 | Use image node_id 2 for calling the TestPodReader obj's underlying member method code with example usage: obj.test_pod_log_reader_role(triggerer, webserver, expected) without return types | 187 | node_id 2 | 13,851 |
test_env_after_learn | global | null | false | algo | null | null | null | null | env | def test_env_after_learn(algo):
def make_env():
# acktr requires too much RAM, fails on travis
env = gym.make(
"CartPole-v1" if algo == "acktr" else "PongNoFrameskip-v4"
)
return env
make_session(make_default=True, graph=tf.Graph())
env = SubprocVecEnv([make_env]... | ["def","test_env_after_learn","(","algo",")",":","def","make_env","(",")",":","#","acktr","requires","too","much","RAM",",","fails","on","travis","env","=","gym.make","(","``","CartPole-v1","''","if","algo","==","``","acktr","''","else","``","PongNoFrameskip-v4","''",")","return","env","make_session","(","make_default=... | 12 | 27 | null | test_env_after_learn.py | baselines/baselines/common/tests/test_env_after_learn.py | import pytest
import gym
import tensorflow
from baselines.common.vec_env.subproc_vec_env import SubprocVecEnv
from baselines.run import get_learn_function
from baselines.common.tf_util import make_session | 7 | null | 6 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: test_env_after_learn(algo) and returns: env | 113 | node_id 1 | 35,143 |
find_constraint_name | global | null | false | upgrade | null | null | null | null | generic_find_constraint_name | def find_constraint_name(upgrade=True):
cols = {"column_name"} if upgrade else {"datasource_name"}
return generic_find_constraint_name(
table="columns",
columns=cols,
referenced="datasources",
database=db,
)
| ["def","find_constraint_name","(","upgrade=True",")",":","cols","=","{","``","column_name","''","}","if","upgrade","else","{","``","datasource_name","''","}","return","generic_find_constraint_name","(","table=","''","columns","''",",","columns=cols",",","referenced=","''","datasources","''",",","database=db",",",")"] | 41 | 45 | null | 2016-05-27_15-03_1226819ee0e3_fix_wrong_constraint_on_table_columns.py | superset/superset/migrations/versions/2016-05-27_15-03_1226819ee0e3_fix_wrong_constraint_on_table_columns.py | import logging
from alembic import op
from superset import db
from superset.utils.core import generic_find_constraint_name | 7 | null | 4 | null | null | 3 | null | Use image node_id 1 for calling a global function with example usage: find_constraint_name(upgrade) and returns: generic_find_constraint_name | 141 | node_id 1 | 436,850 |
test_strict_warning_false | global | null | false | setup,strictness | null | null | null | null | null | def test_strict_warning_false(setup, strictness):
# If the user doesn't wish to be strict about warnings
# the following shouldn't raise any error
os.environ["SKIMAGE_TEST_STRICT_WARNINGS"] = strictness
with expected_warnings(["some warnings"]):
pass
| ["def","test_strict_warning_false","(","setup",",","strictness",")",":","#","If","the","user","does","n't","wish","to","be","strict","about","warnings","#","the","following","should","n't","raise","any","error","os.environ","[","``","SKIMAGE_TEST_STRICT_WARNINGS","''","]","=","strictness","with","expected_warnings","("... | 32 | 37 | null | test_warnings.py | scikit-image/skimage/_shared/tests/test_warnings.py | import os
from skimage._shared._warnings import expected_warnings
import pytest | 7 | null | 3 | null | null | 4 | null | Use image node_id 4 for calling a global function with example usage: test_strict_warning_false(setup, strictness) without return types | 135 | node_id 4 | 344,315 |
test_strict_warning_true | global | null | false | setup,strictness | null | null | null | null | null | def test_strict_warning_true(setup, strictness):
os.environ["SKIMAGE_TEST_STRICT_WARNINGS"] = strictness
with pytest.raises(ValueError):
with expected_warnings(["some warnings"]):
pass
| ["def","test_strict_warning_true","(","setup",",","strictness",")",":","os.environ","[","``","SKIMAGE_TEST_STRICT_WARNINGS","''","]","=","strictness","with","pytest.raises","(","ValueError",")",":","with","expected_warnings","(","[","``","some","warnings","''","]",")",":","pass"] | 24 | 28 | null | test_warnings.py | scikit-image/skimage/_shared/tests/test_warnings.py | import os
from skimage._shared._warnings import expected_warnings
import pytest | 7 | null | 3 | null | null | 4 | null | Use image node_id 3 for calling a global function with example usage: test_strict_warning_true(setup, strictness) without return types | 134 | node_id 3 | 344,314 |
test_strict_warnigns_default | global | null | false | setup | null | null | null | null | null | def test_strict_warnigns_default(setup):
# By default we should fail on missing expected warnings
with pytest.raises(ValueError):
with expected_warnings(["some warnings"]):
pass
| ["def","test_strict_warnigns_default","(","setup",")",":","#","By","default","we","should","fail","on","missing","expected","warnings","with","pytest.raises","(","ValueError",")",":","with","expected_warnings","(","[","``","some","warnings","''","]",")",":","pass"] | 16 | 20 | null | test_warnings.py | scikit-image/skimage/_shared/tests/test_warnings.py | import os
from skimage._shared._warnings import expected_warnings
import pytest | 7 | null | 3 | null | null | 4 | null | Use image node_id 2 for calling a global function with example usage: test_strict_warnigns_default(setup) without return types | 126 | node_id 2 | 344,313 |
test_poly_clip | global | null | false | null | null | null | null | null | def test_poly_clip():
x = [0, 1, 2, 1]
y = [0, -1, 0, 1]
yc, xc = polygon_clip(y, x, 0, 0, 1, 1)
assert_equal(polygon_area(yc, xc), 0.5)
x = [-1, 1.5, 1.5, -1]
y = [0.5, 0.5, 1.5, 1.5]
yc, xc = polygon_clip(y, x, 0, 0, 1, 1)
assert_equal(polygon_area(yc, xc), 0.5)
| ["def","test_poly_clip","(",")",":","x","=","[","0",",","1",",","2",",","1","]","y","=","[","0",",","-1",",","0",",","1","]","yc",",","xc","=","polygon_clip","(","y",",","x",",","0",",","0",",","1",",","1",")","assert_equal","(","polygon_area","(","yc",",","xc",")",",","0.5",")","x","=","[","-1",",","1.5",",","1.5",","... | 55 | 65 | null | test_geometry.py | scikit-image/skimage/_shared/tests/test_geometry.py | import pytest
from skimage._shared._geometry import polygon_clip, polygon_area
import numpy
from numpy.testing import assert_equal, assert_almost_equal | 7 | null | 4 | null | null | 3 | null | Use image node_id 2 for calling a global function with example usage: test_poly_clip() without return types | 107 | node_id 2 | 344,275 | |
test_hand_clip | global | null | false | null | null | null | null | null | def test_hand_clip():
(r0, c0, r1, c1) = (1.0, 1.5, 2.1, 2.5)
clip_r, clip_c = polygon_clip(
hand[:, 1], hand[:, 0], r0, c0, r1, c1
)
assert_equal(clip_r.size, 19)
assert_equal(clip_r[0], clip_r[-1])
assert_equal(clip_c[0], clip_c[-1])
(r0, c0, r1, c1) = (1.0, 1.5, 1.7, 2.5)
cli... | ["def","test_hand_clip","(",")",":","(","r0",",","c0",",","r1",",","c1",")","=","(","1.0",",","1.5",",","2.1",",","2.5",")","clip_r",",","clip_c","=","polygon_clip","(","hand","[",":",",","1","]",",","hand","[",":",",","0","]",",","r0",",","c0",",","r1",",","c1",")","assert_equal","(","clip_r.size",",","19",")","assert... | 68 | 81 | null | test_geometry.py | scikit-image/skimage/_shared/tests/test_geometry.py | import pytest
from skimage._shared._geometry import polygon_clip, polygon_area
import numpy
from numpy.testing import assert_equal, assert_almost_equal | 7 | null | 4 | null | null | 3 | null | Use image node_id 3 for calling a global function with example usage: test_hand_clip() without return types | 107 | node_id 3 | 344,276 | |
test_call_works_when_ngram_size_is_1 | TestNGramRepeatBlockProcessor | null | true | self | null | null | null | null | null | def test_call_works_when_ngram_size_is_1(self) -> None:
seq1 = torch.tensor([1, 3, 0], device=device)
seq2 = torch.tensor([2, 1, 1], device=device)
seqs = torch.stack([seq1, seq2])
probs1 = torch.tensor([0.1, 0.1, 0.1, 0.1], device=device)
probs2 = torch.tensor([0.1, 0.1, 0.1, 0.1], device=device)... | ["def","test_call_works_when_ngram_size_is_1","(","self",")","-",">","None",":","seq1","=","torch.tensor","(","[","1",",","3",",","0","]",",","device=device",")","seq2","=","torch.tensor","(","[","2",",","1",",","1","]",",","device=device",")","seqs","=","torch.stack","(","[","seq1",",","seq2","]",")","probs1","=","tor... | 35 | 51 | null | test_step_processor.py | fairseq2/tests/unit/generation/test_step_processor.py | import torch
from fairseq2.generation import NGramRepeatBlockProcessor
from tests.common import assert_close, device | 7 | 1 | 3 | 0 | null | 0 | 3 | Use image node_id 2 for calling the TestNGramRepeatBlockProcessor obj's underlying member method code with example usage: obj.test_call_works_when_ngram_size_is_1() without return types | 185 | node_id 2 | 133,804 |
get_credentials | UsernamePasswordCredentialProvider | CredentialProvider | true | self | Simple implementation of CredentialProvider that just wraps static
username and password. | ["Simple","implementation","of","CredentialProvider","that","just","wraps","static","username","and","password","."] | null | null | self,self, self | def get_credentials(self):
if self.username:
return self.username, self.password
return (self.password,)
| ["def","get_credentials","(","self",")",":","if","self.username",":","return","self.username",",","self.password","return","(","self.password",",",")"] | 23 | 26 | null | credentials.py | redis-py/redis/credentials.py | from typing import Optional, Tuple, Union | 7 | 2 | 1 | 1 | 1 | 0 | 2 | Use image node_id 2 for calling the UsernamePasswordCredentialProvider obj's underlying member method code with example usage: obj.get_credentials() and returns: self, self, self | 179 | node_id 2 | 339,298 |
__init__ | UsernamePasswordCredentialProvider | CredentialProvider | true | self,username,password | Simple implementation of CredentialProvider that just wraps static
username and password. | ["Simple","implementation","of","CredentialProvider","that","just","wraps","static","username","and","password","."] | null | null | UsernamePasswordCredentialProvider | def __init__(
self,
username: Optional[str] = None,
password: Optional[str] = None,
):
self.username = username or ""
self.password = password or ""
| ["def","__init__","(","self",",","username",":","Optional","[","str","]","=","None",",","password",":","Optional","[","str","]","=","None",",",")",":","self.username","=","username","or","``","''","self.password","=","password","or","``","''"] | 19 | 21 | null | credentials.py | redis-py/redis/credentials.py | from typing import Optional, Tuple, Union | 7 | 2 | 1 | 1 | 1 | 0 | 2 | Use image node_id 1 to create a new UsernamePasswordCredentialProvider object from inherited base classes: CredentialProvider with example: obj = UsernamePasswordCredentialProvider(username, password) | 200 | node_id 1 | 339,297 |
get_credentials | CredentialProvider | null | true | self | Credentials Provider. | ["Credentials","Provider","."] | null | null | null | def get_credentials(self) -> Union[Tuple[str], Tuple[str, str]]:
raise NotImplementedError("get_credentials must be implemented")
| ["def","get_credentials","(","self",")","-",">","Union","[","Tuple","[","str","]",",","Tuple","[","str",",","str","]","]",":","raise","NotImplementedError","(","``","get_credentials","must","be","implemented","''",")"] | 9 | 10 | credentials.py | redis-py/redis/credentials.py | from typing import Optional, Tuple, Union | 7 | 2 | 1 | 1 | null | 0 | 1 | Use image node_id 1 for calling the CredentialProvider obj's underlying member method code with example usage: obj.get_credentials() without return types | 153 | node_id 1 | 339,296 | |
fake_decode_token | global | null | false | token | null | null | null | null | User | def fake_decode_token(token):
return User(
username=token + "fakedecoded",
email="john@example.com",
full_name="John Doe",
)
| ["def","fake_decode_token","(","token",")",":","return","User","(","username=token","+","``","fakedecoded","''",",","email=","''","john","@","example.com","''",",","full_name=","''","John","Doe","''",",",")"] | 19 | 22 | null | tutorial002.py | fastapi/docs_src/security/tutorial002.py | from typing import Union
from fastapi import Depends, FastAPI
from fastapi.security import OAuth2PasswordBearer
from pydantic import BaseModel | 7 | null | 4 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: fake_decode_token(token) and returns: User | 112 | node_id 1 | 135,730 |
downgrade | global | null | false | null | null | null | null | null | def downgrade():
constraint = (
find_constraint_name(False)
or "fk_columns_datasource_name_datasources"
)
with op.batch_alter_table(
"columns", naming_convention=naming_convention
) as batch_op:
batch_op.drop_constraint(constraint, type_="foreignkey")
batch_op.cre... | ["def","downgrade","(",")",":","constraint","=","(","find_constraint_name","(","False",")","or","``","fk_columns_datasource_name_datasources","''",")","with","op.batch_alter_table","(","``","columns","''",",","naming_convention=naming_convention",")","as","batch_op",":","batch_op.drop_constraint","(","constraint",",","... | 66 | 77 | null | 2016-05-27_15-03_1226819ee0e3_fix_wrong_constraint_on_table_columns.py | superset/superset/migrations/versions/2016-05-27_15-03_1226819ee0e3_fix_wrong_constraint_on_table_columns.py | import logging
from alembic import op
from superset import db
from superset.utils.core import generic_find_constraint_name | 7 | null | 4 | null | null | 3 | null | Use image node_id 3 for calling a global function with example usage: downgrade() without return types | 102 | node_id 3 | 436,852 | |
llama_tokenizer | global | null | false | null | null | null | null | None,LlamaTokenizer | def llama_tokenizer():
try:
return LlamaTokenizer.from_pretrained(
"meta-llama/Llama-2-7b-hf"
)
except OSError as e:
if unskip_missing_tokenizer:
raise e
return None
| ["def","llama_tokenizer","(",")",":","try",":","return","LlamaTokenizer.from_pretrained","(","``","meta-llama\/Llama-2-7b-hf","''",")","except","OSError","as","e",":","if","unskip_missing_tokenizer",":","raise","e","return","None"] | 13 | 19 | null | conftest.py | llama-recipes/tests/conftest.py | import pytest
from transformers import LlamaTokenizer | 7 | null | 2 | null | null | 5 | null | Use image node_id 1 for calling a global function with example usage: llama_tokenizer() and returns: None, LlamaTokenizer | 121 | node_id 1 | 166,993 | |
setup_tokenizer | global | null | false | llama_tokenizer | null | null | null | null | _helper | def setup_tokenizer(llama_tokenizer):
def _helper(tokenizer_mock):
# Align with Llama 2 tokenizer
tokenizer_mock.from_pretrained.return_value = llama_tokenizer
return _helper
| ["def","setup_tokenizer","(","llama_tokenizer",")",":","def","_helper","(","tokenizer_mock",")",":","#","Align","with","Llama","2","tokenizer","tokenizer_mock.from_pretrained.return_value","=","llama_tokenizer","return","_helper"] | 23 | 28 | null | conftest.py | llama-recipes/tests/conftest.py | import pytest
from transformers import LlamaTokenizer | 7 | null | 2 | null | null | 5 | null | Use image node_id 2 for calling a global function with example usage: setup_tokenizer(llama_tokenizer) and returns: _helper | 123 | node_id 2 | 166,994 |
skip_if_tokenizer_is_missing | global | null | false | request,llama_tokenizer | null | null | null | null | null | def skip_if_tokenizer_is_missing(request, llama_tokenizer):
if (
request.node.get_closest_marker("skip_missing_tokenizer")
and not unskip_missing_tokenizer
):
if llama_tokenizer is None:
pytest.skip(ACCESS_ERROR_MSG)
| ["def","skip_if_tokenizer_is_missing","(","request",",","llama_tokenizer",")",":","if","(","request.node.get_closest_marker","(","``","skip_missing_tokenizer","''",")","and","not","unskip_missing_tokenizer",")",":","if","llama_tokenizer","is","None",":","pytest.skip","(","ACCESS_ERROR_MSG",")"] | 32 | 35 | null | conftest.py | llama-recipes/tests/conftest.py | import pytest
from transformers import LlamaTokenizer | 7 | null | 2 | null | null | 5 | null | Use image node_id 3 for calling a global function with example usage: skip_if_tokenizer_is_missing(request, llama_tokenizer) without return types | 145 | node_id 3 | 166,995 |
pytest_addoption | global | null | false | parser | null | null | null | null | null | def pytest_addoption(parser):
parser.addoption(
"--unskip-missing-tokenizer",
action="store_true",
default=False,
help="disable skip missing tokenizer",
)
| ["def","pytest_addoption","(","parser",")",":","parser.addoption","(","``","--","unskip-missing-tokenizer","''",",","action=","''","store_true","''",",","default=False",",","help=","''","disable","skip","missing","tokenizer","''",",",")"] | 38 | 42 | null | conftest.py | llama-recipes/tests/conftest.py | import pytest
from transformers import LlamaTokenizer | 7 | null | 2 | null | null | 5 | null | Use image node_id 4 for calling a global function with example usage: pytest_addoption(parser) without return types | 115 | node_id 4 | 166,996 |
upgrade | global | null | false | null | null | null | null | null | def upgrade():
try:
constraint = find_constraint_name()
with op.batch_alter_table(
"columns", naming_convention=naming_convention
) as batch_op:
if constraint:
batch_op.drop_constraint(
constraint, type_="foreignkey"
... | ["def","upgrade","(",")",":","try",":","constraint","=","find_constraint_name","(",")","with","op.batch_alter_table","(","``","columns","''",",","naming_convention=naming_convention",")","as","batch_op",":","if","constraint",":","batch_op.drop_constraint","(","constraint",",","type_=","''","foreignkey","''",")","batch_... | 48 | 63 | null | 2016-05-27_15-03_1226819ee0e3_fix_wrong_constraint_on_table_columns.py | superset/superset/migrations/versions/2016-05-27_15-03_1226819ee0e3_fix_wrong_constraint_on_table_columns.py | import logging
from alembic import op
from superset import db
from superset.utils.core import generic_find_constraint_name | 7 | null | 4 | null | null | 3 | null | Use image node_id 2 for calling a global function with example usage: upgrade() without return types | 100 | node_id 2 | 436,851 | |
setup | global | null | false | null | null | null | null | null | def setup():
# Remove any environment variable if it exists
old_strictness = os.environ.pop(
"SKIMAGE_TEST_STRICT_WARNINGS", None
)
yield
# Add the user's desired strictness
if old_strictness is not None:
os.environ["SKIMAGE_TEST_STRICT_WARNINGS"] = old_strictness
| ["def","setup","(",")",":","#","Remove","any","environment","variable","if","it","exists","old_strictness","=","os.environ.pop","(","``","SKIMAGE_TEST_STRICT_WARNINGS","''",",","None",")","yield","#","Add","the","user","'s","desired","strictness","if","old_strictness","is","not","None",":","os.environ","[","``","SKIMAG... | 7 | 13 | null | test_warnings.py | scikit-image/skimage/_shared/tests/test_warnings.py | import os
from skimage._shared._warnings import expected_warnings
import pytest | 7 | null | 3 | null | null | 4 | null | Use image node_id 1 for calling a global function with example usage: setup() without return types | 98 | node_id 1 | 344,312 | |
test_benchmark_infra_runs | TestDynamoBenchmark | unittest | true | self | null | null | Basic smoke test that TorchBench runs.
This test is mainly meant to check that our setup in fbcode
doesn't break.
If you see a failure here related to missing CPP headers, then
you likely need to update the resources list in:
//caffe2:inductor | ["Basic","smoke","test","that","TorchBench","runs",".","This","test","is","mainly","meant","to","check","that","our","setup","in","fbcode","does","n't","break",".","If","you","see","a","failure","here","related","to","missing","CPP","headers",",","then","you","likely","need","to","update","the","resources","list","in",... | null | def test_benchmark_infra_runs(self) -> None:
"""
Basic smoke test that TorchBench runs.
This test is mainly meant to check that our setup in fbcode
doesn't break.
If you see a failure here related to missing CPP headers, then
you likely need to update the resources list in:
//caffe2:in... | ["def","test_benchmark_infra_runs","(","self",")","-",">","None",":","``","''","''","Basic","smoke","test","that","TorchBench","runs",".","This","test","is","mainly","meant","to","check","that","our","setup","in","fbcode","does","n't","break",".","If","you","see","a","failure","here","related","to","missing","CPP","hea... | 19 | 44 | null | test.py | pytorch/benchmarks/dynamo/test.py | import os
import unittest
from .common import parse_args, run
from .torchbench import setup_torchbench_cwd, TorchBenchmarkRunner | 7 | 1 | 4 | 1 | 1 | 0 | 1 | Use image node_id 1 for calling the TestDynamoBenchmark obj's underlying member method code with example usage: obj.test_benchmark_infra_runs() without return types | 164 | node_id 1 | 271,340 |
forward | C2SimpleNet | null | true | self,niters | This module constructs a net with 'op_name' operator. The net consist
a series of such operator.
It initializes the workspace with input blob equal to the number of parameters
needed for the op.
Provides forward method to run the net niter times. | ["This","module","constructs","a","net","with","'op_name","'","operator",".","The","net","consist","a","series","of","such","operator",".","It","initializes","the","workspace","with","input","blob","equal","to","the","number","of","parameters","needed","for","the","op",".","Provides","forward","method","to","run","the"... | null | null | null | def forward(self, niters):
workspace.RunNet(self.net, niters, False)
| ["def","forward","(","self",",","niters",")",":","workspace.RunNet","(","self.net",",","niters",",","False",")"] | 43 | 44 | null | C2Module.py | pytorch/benchmarks/framework_overhead_benchmark/C2Module.py | import numpy
from caffe2.python import core, workspace
from utils import NUM_LOOP_ITERS | 7 | 1 | 3 | 0 | null | 1 | 2 | Use image node_id 2 for calling the C2SimpleNet obj's underlying member method code with example usage: obj.forward(niters) without return types | 144 | node_id 2 | 271,495 |
read_items | global | null | false | null | null | null | null | list | def read_items() -> List[Item]:
return [
Item(
name="Portal Gun",
description="Device to travel through the multi-rick-verse",
),
Item(name="Plumbus"),
]
| ["def","read_items","(",")","-",">","List","[","Item","]",":","return","[","Item","(","name=","''","Portal","Gun","''",",","description=","''","Device","to","travel","through","the","multi-rick-verse","''",",",")",",","Item","(","name=","''","Plumbus","''",")",",","]"] | 21 | 28 | null | tutorial001.py | fastapi/docs_src/separate_openapi_schemas/tutorial001.py | from typing import List, Union
from fastapi import FastAPI
from pydantic import BaseModel | 7 | null | 3 | null | null | 2 | null | Use image node_id 2 for calling a global function with example usage: read_items() and returns: list | 100 | node_id 2 | 135,815 | |
upgrade | global | null | false | null | null | null | null | null | def upgrade():
"""Apply Add ``RenderedTaskInstanceFields`` table"""
json_type = sa.JSON
conn = op.get_bind()
if conn.dialect.name != "postgresql":
# Mysql 5.7+/MariaDB 10.2.3 has JSON support. Rather than checking for
# versions, check for the function existing.
try:
... | ["def","upgrade","(",")",":","``","''","''","Apply","Add","``","RenderedTaskInstanceFields","``","table","''","''","''","json_type","=","sa.JSON","conn","=","op.get_bind","(",")","if","conn.dialect.name","!","=","``","postgresql","''",":","#","Mysql","5.7+\/MariaDB","10.2.3","has","JSON","support",".","Rather","than","... | 43 | 63 | null | 0053_1_10_10_add_rendered_task_instance_fields_table.py | airflow/airflow/migrations/versions/0053_1_10_10_add_rendered_task_instance_fields_table.py | from __future__ import annotations
import sqlalchemy
from alembic import op
from sqlalchemy import text
from airflow.migrations.db_types import StringID | 7 | null | 5 | null | null | 2 | null | Use image node_id 1 for calling a global function with example usage: upgrade() without return types | 100 | node_id 1 | 2,595 | |
has_unique_constraint | global | null | false | constraint_name,table_name | null | null | null | null | unknown | def has_unique_constraint(
constraint_name: str, table_name: str
) -> bool:
bind = op.get_bind()
inspector = Inspector.from_engine(bind)
unique_constraints = inspector.get_unique_constraints(table_name)
return constraint_name in {
constraint["name"] for constraint in unique_constraints
}... | ["def","has_unique_constraint","(","constraint_name",":","str",",","table_name",":","str",")","-",">","bool",":","bind","=","op.get_bind","(",")","inspector","=","Inspector.from_engine","(","bind",")","unique_constraints","=","inspector.get_unique_constraints","(","table_name",")","return","constraint_name","in","{","c... | 129 | 133 | null | 2020-11-04_11-06_49b5a32daba5_add_report_schedules.py | superset/superset/migrations/versions/2020-11-04_11-06_49b5a32daba5_add_report_schedules.py | import sqlalchemy
from alembic import op
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.exc import OperationalError | 7 | null | 4 | null | null | 3 | null | Use image node_id 2 for calling a global function with example usage: has_unique_constraint(constraint_name, table_name) and returns: unknown | 141 | node_id 2 | 437,189 |
upgrade | global | null | false | null | null | null | null | null | def upgrade():
op.create_table(
"report_schedule",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("type", sa.String(length=50), nullable=False),
sa.Column(
"name", sa.String(length=150), nullable=False, unique=True
),
sa.Column("description", sa.... | ["def","upgrade","(",")",":","op.create_table","(","``","report_schedule","''",",","sa.Column","(","``","id","''",",","sa.Integer","(",")",",","nullable=False",")",",","sa.Column","(","``","type","''",",","sa.String","(","length=50",")",",","nullable=False",")",",","sa.Column","(","``","name","''",",","sa.String","(","... | 35 | 126 | null | 2020-11-04_11-06_49b5a32daba5_add_report_schedules.py | superset/superset/migrations/versions/2020-11-04_11-06_49b5a32daba5_add_report_schedules.py | import sqlalchemy
from alembic import op
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.exc import OperationalError | 7 | null | 4 | null | null | 3 | null | Use image node_id 1 for calling a global function with example usage: upgrade() without return types | 100 | node_id 1 | 437,188 | |
upgrade | global | null | false | null | null | null | null | null | def upgrade():
# We previously had a KnownEvent's table, but we deleted the table without
# a down migration to remove it (so we didn't delete anyone's data if they
# were happening to use the feature.
#
# But before we can delete the users table we need to drop the FK
conn = op.get_bind()
... | ["def","upgrade","(",")",":","#","We","previously","had","a","KnownEvent","'s","table",",","but","we","deleted","the","table","without","#","a","down","migration","to","remove","it","(","so","we","did","n't","delete","anyone","'s","data","if","they","#","were","happening","to","use","the","feature",".","#","#","But","b... | 39 | 62 | null | 0059_2_0_0_drop_user_and_chart.py | airflow/airflow/migrations/versions/0059_2_0_0_drop_user_and_chart.py | from __future__ import annotations
import sqlalchemy
from alembic import op
from sqlalchemy import inspect, text
from sqlalchemy.dialects import mysql | 7 | null | 5 | null | null | 2 | null | Use image node_id 1 for calling a global function with example usage: upgrade() without return types | 100 | node_id 1 | 2,607 | |
downgrade | global | null | false | null | null | null | null | null | def downgrade():
conn = op.get_bind()
op.create_table(
"users",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("username", sa.String(length=250), nullable=True),
sa.Column("email", sa.String(length=500), nullable=True),
sa.Column("password", sa.String(255)),
... | ["def","downgrade","(",")",":","conn","=","op.get_bind","(",")","op.create_table","(","``","users","''",",","sa.Column","(","``","id","''",",","sa.Integer","(",")",",","nullable=False",")",",","sa.Column","(","``","username","''",",","sa.String","(","length=250",")",",","nullable=True",")",",","sa.Column","(","``","ema... | 65 | 113 | null | 0059_2_0_0_drop_user_and_chart.py | airflow/airflow/migrations/versions/0059_2_0_0_drop_user_and_chart.py | from __future__ import annotations
import sqlalchemy
from alembic import op
from sqlalchemy import inspect, text
from sqlalchemy.dialects import mysql | 7 | null | 5 | null | null | 2 | null | Use image node_id 2 for calling a global function with example usage: downgrade() without return types | 102 | node_id 2 | 2,608 | |
evaluation | global | null | false | X_selected,n_clusters,y | null | null | null | null | nmi, acc | def evaluation(X_selected, n_clusters, y):
"""
This function calculates ARI, ACC and NMI of clustering results
Input
-----
X_selected: {numpy array}, shape (n_samples, n_selected_features}
input data on the selected features
n_clusters: {int}
number of clusters
y: {n... | ["def","evaluation","(","X_selected",",","n_clusters",",","y",")",":","``","''","''","This","function","calculates","ARI",",","ACC","and","NMI","of","clustering","results","Input","--","--","-","X_selected",":","{","numpy","array","}",",","shape","(","n_samples",",","n_selected_features","}","input","data","on","the","... | 39 | 73 | null | unsupervised_evaluation.py | scikit-feature/skfeature/utility/unsupervised_evaluation.py | import numpy
import sklearn.utils.linear_assignment_
from sklearn.metrics import accuracy_score
from sklearn.metrics.cluster import normalized_mutual_info_score
from sklearn.cluster import KMeans | 7 | null | 5 | null | null | 2 | null | Use image node_id 2 for calling a global function with example usage: evaluation(X_selected, n_clusters, y) and returns: nmi, acc | 130 | node_id 2 | 343,991 |
best_map | global | null | false | l1,l2 | null | null | null | null | new_l2 | def best_map(l1, l2):
"""
Permute labels of l2 to match l1 as much as possible
"""
if len(l1) != len(l2):
print("L1.shape must == L2.shape")
exit(0)
label1 = np.unique(l1)
n_class1 = len(label1)
label2 = np.unique(l2)
n_class2 = len(label2)
n_class = max(n_class1, ... | ["def","best_map","(","l1",",","l2",")",":","``","''","''","Permute","labels","of","l2","to","match","l1","as","much","as","possible","``","''","''","if","len","(","l1",")","!","=","len","(","l2",")",":","print","(","``","L1.shape","must","==","L2.shape","''",")","exit","(","0",")","label1","=","np.unique","(","l1",")"... | 8 | 36 | null | unsupervised_evaluation.py | scikit-feature/skfeature/utility/unsupervised_evaluation.py | import numpy
import sklearn.utils.linear_assignment_
from sklearn.metrics import accuracy_score
from sklearn.metrics.cluster import normalized_mutual_info_score
from sklearn.cluster import KMeans | 7 | null | 5 | null | null | 2 | null | Use image node_id 1 for calling a global function with example usage: best_map(l1, l2) and returns: new_l2 | 106 | node_id 1 | 343,990 |
encode_base64 | global | null | false | msg | null | null | null | null | null | def encode_base64(msg):
"""Encode the message's payload in Base64.
Also, add an appropriate Content-Transfer-Encoding header.
"""
orig = msg.get_payload(decode=True)
encdata = str(_bencode(orig), "ascii")
msg.set_payload(encdata)
msg["Content-Transfer-Encoding"] = "base64"
| ["def","encode_base64","(","msg",")",":","``","''","''","Encode","the","message","'s","payload","in","Base64",".","Also",",","add","an","appropriate","Content-Transfer-Encoding","header.","``","''","''","orig","=","msg.get_payload","(","decode=True",")","encdata","=","str","(","_bencode","(","orig",")",",","``","ascii"... | 25 | 33 | null | encoders.py | cpython/Lib/email/encoders.py | from base64 import encodebytes
from quopri import encodestring | 7 | null | 2 | null | null | 5 | null | Use image node_id 2 for calling a global function with example usage: encode_base64(msg) without return types | 109 | node_id 2 | 52,445 |
encode_quopri | global | null | false | msg | null | null | null | null | null | def encode_quopri(msg):
"""Encode the message's payload in quoted-printable.
Also, add an appropriate Content-Transfer-Encoding header.
"""
orig = msg.get_payload(decode=True)
encdata = _qencode(orig)
msg.set_payload(encdata)
msg["Content-Transfer-Encoding"] = "quoted-printable"
| ["def","encode_quopri","(","msg",")",":","``","''","''","Encode","the","message","'s","payload","in","quoted-printable",".","Also",",","add","an","appropriate","Content-Transfer-Encoding","header.","``","''","''","orig","=","msg.get_payload","(","decode=True",")","encdata","=","_qencode","(","orig",")","msg.set_payload... | 36 | 44 | null | encoders.py | cpython/Lib/email/encoders.py | from base64 import encodebytes
from quopri import encodestring | 7 | null | 2 | null | null | 5 | null | Use image node_id 3 for calling a global function with example usage: encode_quopri(msg) without return types | 109 | node_id 3 | 52,446 |
encode_7or8bit | global | null | false | msg | null | null | null | null | null | def encode_7or8bit(msg):
"""Set the Content-Transfer-Encoding header to 7bit or 8bit."""
orig = msg.get_payload(decode=True)
if orig is None:
# There's no payload. For backwards compatibility we use 7bit
msg["Content-Transfer-Encoding"] = "7bit"
return
# We play a trick to make ... | ["def","encode_7or8bit","(","msg",")",":","``","''","''","Set","the","Content-Transfer-Encoding","header","to","7bit","or","8bit",".","''","''","''","orig","=","msg.get_payload","(","decode=True",")","if","orig","is","None",":","#","There","'s","no","payload",".","For","backwards","compatibility","we","use","7bit","msg... | 47 | 61 | null | encoders.py | cpython/Lib/email/encoders.py | from base64 import encodebytes
from quopri import encodestring | 7 | null | 2 | null | null | 5 | null | Use image node_id 4 for calling a global function with example usage: encode_7or8bit(msg) without return types | 110 | node_id 4 | 52,447 |
encode_noop | global | null | false | msg | null | null | null | null | null | def encode_noop(msg):
"""Do nothing."""
| ["def","encode_noop","(","msg",")",":","``","''","''","Do","nothing",".","''","''","''"] | 64 | 65 | null | encoders.py | cpython/Lib/email/encoders.py | from base64 import encodebytes
from quopri import encodestring | 7 | null | 2 | null | null | 5 | null | Use image node_id 5 for calling a global function with example usage: encode_noop(msg) without return types | 107 | node_id 5 | 52,448 |
__init__ | Foo | null | true | self | Docstring for class Foo. Example from Sphinx docs. | ["Docstring","for","class","Foo",".","Example","from","Sphinx","docs","."] | null | null | Foo | def __init__(self):
#: Doc comment for instance attribute qux.
self.qux = 3
self.spam = 4
"""Docstring for instance attribute spam."""
| ["def","__init__","(","self",")",":","#",":","Doc","comment","for","instance","attribute","qux",".","self.qux","=","3","self.spam","=","4","``","''","''","Docstring","for","instance","attribute","spam",".","''","''","''"] | 70 | 75 | null | comments.py | black/tests/data/cases/comments.py | import os.path
import sys
import a
from b.c import X | 7 | 1 | 4 | 0 | null | 1 | 1 | Use image node_id 1 to create a new Foo object with example: obj = Foo() | 73 | node_id 1 | 37,107 |
torch_nn_func_batchnorm | global | null | false | input,running_mean,running_var,weight,bias,training,momentum,eps | null | null | null | null | flops, macs | def torch_nn_func_batchnorm(
input: torch.Tensor,
running_mean: Optional[torch.Tensor],
running_var: Optional[torch.Tensor],
weight: Optional[torch.Tensor] = None,
bias: Optional[torch.Tensor] = None,
training: bool = False,
momentum: float = 0.1,
eps: float = 1e-5,
) -> Tuple[int, int]:... | ["def","torch_nn_func_batchnorm","(","input",":","torch.Tensor",",","running_mean",":","Optional","[","torch.Tensor","]",",","running_var",":","Optional","[","torch.Tensor","]",",","weight",":","Optional","[","torch.Tensor","]","=","None",",","bias",":","Optional","[","torch.Tensor","]","=","None",",","training",":","b... | 54 | 70 | null | normalization.py | colossalai/colossalai/fx/profiler/experimental/profiler_function/normalization.py | from typing import List, Optional, Tuple
import torch
from ..registry import meta_profiler_function | 7 | null | 3 | null | null | 4 | null | Use image node_id 4 for calling a global function with example usage: torch_nn_func_batchnorm(input, running_mean, running_var, weight, bias, training, momentum, eps) and returns: flops, macs | 192 | node_id 4 | 41,339 |
torch_nn_func_layernorm | global | null | false | input,normalized_shape,weight,bias,eps | null | null | null | null | flops, macs | def torch_nn_func_layernorm(
input: torch.Tensor,
normalized_shape: List[int],
weight: Optional[torch.Tensor] = None,
bias: Optional[torch.Tensor] = None,
eps: float = 1e-5,
) -> Tuple[int, int]:
has_affine = weight is not None
flops = input.numel() * (5 if has_affine else 4)
macs = 0
... | ["def","torch_nn_func_layernorm","(","input",":","torch.Tensor",",","normalized_shape",":","List","[","int","]",",","weight",":","Optional","[","torch.Tensor","]","=","None",",","bias",":","Optional","[","torch.Tensor","]","=","None",",","eps",":","float","=","1e-5",",",")","-",">","Tuple","[","int",",","int","]",":","... | 40 | 50 | null | normalization.py | colossalai/colossalai/fx/profiler/experimental/profiler_function/normalization.py | from typing import List, Optional, Tuple
import torch
from ..registry import meta_profiler_function | 7 | null | 3 | null | null | 4 | null | Use image node_id 3 for calling a global function with example usage: torch_nn_func_layernorm(input, normalized_shape, weight, bias, eps) and returns: flops, macs | 163 | node_id 3 | 41,338 |
test_model_dump_json | global | null | false | null | null | null | null | null | def test_model_dump_json() -> None:
class A(BaseModel):
a: str
b: int = 2
assert (
model_dump_json(A(a="aaa")).replace(" ", "")
== '{"a":"aaa","b":2}'
)
| ["def","test_model_dump_json","(",")","-",">","None",":","class","A","(","BaseModel",")",":","a",":","str","b",":","int","=","2","assert","(","model_dump_json","(","A","(","a=","''","aaa","''",")",")",".replace","(","``","``",",","``","''",")","==","'","{","``","a","''",":","''","aaa","''",",","''","b","''",":2","}","'... | 36 | 41 | null | test_pydantic.py | autogen/test/test_pydantic.py | from typing import Dict, List, Optional, Tuple, Union
from pydantic import BaseModel, Field
from typing_extensions import Annotated
from autogen._pydantic import model_dump, model_dump_json, type2schema | 7 | null | 4 | null | null | 3 | null | Use image node_id 3 for calling a global function with example usage: test_model_dump_json() without return types | 113 | node_id 3 | 33,747 | |
test_complex_file | global | null | false | test_case | null | null | null | null | null | def test_complex_file(test_case):
"""Run domain-specific tests from test.py file."""
case_module = __import__(
f"test_cases.{test_case}",
globals=globals(),
locals=locals(),
fromlist=["expectation"],
level=1,
)
test_case_dir = os.path.normcase(os.path.dirname(__fi... | ["def","test_complex_file","(","test_case",")",":","``","''","''","Run","domain-specific","tests","from","test.py","file",".","''","''","''","case_module","=","__import__","(","f","''","test_cases",".","{","test_case","}","''",",","globals=globals","(",")",",","locals=locals","(",")",",","fromlist=","[","``","expectati... | 30 | 52 | null | test_definitions.py | pydocstyle/src/tests/test_definitions.py | import os
import re
import pytest
from pydocstyle.violations import Error, ErrorRegistry
from pydocstyle.checker import check
from pydocstyle.config import ConfigurationParser | 7 | null | 6 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: test_complex_file(test_case) without return types | 119 | node_id 1 | 258,436 |
This dataset is a subset of the matlok Python copilot datasets. Please refer to the Multimodal Python Copilot Training Overview for more details on how to use this dataset.
Each row contains Python code — either a class method or a global function — along with imported modules, base classes (if any), exceptions (ordered based on the code), returns (ordered based on the code), arguments (ordered based on the code), and more.
{
"args": "string",
"class_bases": "string",
"class_docstr": "string",
"class_docstr_tok": "string",
"class_name": "string",
"code": "string",
"code_tok": "string",
"docstr": "string",
"docstr_tok": "string",
"file_path": "string",
"filename": "string",
"imports": "string",
"is_member": "bool",
"label_desc": "string",
"label_desc_len": "int64",
"label_id": "string",
"lend": "int64",
"lstart": "int64",
"name": "string",
"num_all_bases": "float64",
"num_bases": "float64",
"num_classes": "float64",
"num_functions": "int64",
"num_imports": "int64",
"num_methods": "float64",
"raises": "string",
"returns": "string",
"total_objects": "int64"
}
from datasets import load_dataset
ds = load_dataset("matlok/python-copilot-training-on-ai-research-repos", data_dir="files")