পাইথনে ব্যবহৃত কিছু শব্দ বা কোড
(৭ম অংশ)
🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥🔥
পাইথনে এমন কিছু মডিউল আছে যেগুলো সাধারণ Linux বা Unix এ ব্যবহার হয় Windows এ নয়। এদের মধ্যে winsound মডিউলটি Windows OS-এ কাজ করে। এমনও কিছু মডিউল আছে যাদের ব্যবহার অতি সীমিত। কিছু আছে Third-party বা যা Standard নয়। এ ধরনের কিছু মডিউল নিয়ে এ অংশে সংক্ষেপে আলোচনা করা হলো —
🔥abc🔥
abc.ABC
abc.ABCMeta
abc.abstractmethod
abc.abstractproperty
🔥accelerate🔥
acc = accelerate
acce = accelerate.Accelerator()
acc.init_empty_weights
acc.load_checkpoint_and_dispatch
acce.autocast()
acce.backward()
acce.clip_grad_norm_()
acce.is_main_process
acce.load()
acce.load_state()
acce.prepare()
acce.save()
acce.seed_everything()
🔥aifc🔥
aifc.open
🔥aiohttp🔥
app = aiohttp.web.Application()
trace = aiohttp.TraceConfig()
ws = aiohttp.web
.WebSocketResponse()
aiohttp.ClientError
aiohttp.ClientSession().get().text()
aiohttp.ClientSession().post()
aiohttp.ClientTimeout(total = 5)
aiohttp.CookieJar()
aiohttp.TCPConnector()
aiohttp.web.Application()
aiohttp.web.HTTPException
aiohttp.web.HTTPException.reason
aiohttp.web.json_response()
aiohttp.web.Response()
aiohttp.web.run_app()
aiohttp.web.WebSocketResponse()
aiohttp.web.WSMsgType.TEXT
app.router.add_get()
trace.on_request_start.append()
ws.prepare()
ws.send_str()
🔥aiosqlite🔥
aiosqlite.connect("test.db") as db
db.commit()
db.execute()
db.execute().fetchall()
db.execute().fetchone()
🔥airflow🔥
ae = airflow.executors
ah = airflow.hooks
am = airflow.models
ao = airflow.operators
ap = airflow.providers
asn = airflow.sensors
airflow.DAG
ao.bash.BashOperator
ao.python.PythonOperator
ap.amazon.aws.sensors.s3
.S3KeySensor
ap.http.sensors.http.HttpSensor
asn.external_task.ExternalTaskSensor
asn.filesystem.FileSensor
asn.sql.SqlSensor
🔥alembic🔥
cfg = alembic.config.Config(
"alembic.ini")
alembic.command.upgrade()
alembic.config.Config("alembic.ini")
alembic.context.get_context()
.dialect.name
alembic.runtime.environment
alembic.runtime.migration
cfg.get_main_option(
"sqlalchemy.url")
🔥altair🔥
import altair as alt
import pandas as pd
brush = alt.selection_interval()
data = pd.DataFrame({
"year": [2018, 2019, 2020, 2021],
"sales": [100, 120, 150, 180]
})
alt.Chart(data).mark_bar().encode(
x = "year : O", y = "sales : Q")
alt.Chart(data).mark_line().encode(
x = "year", y = "sales").interactive()
alt.Chart(data).mark_point(
size = 100).encode()
alt.Chart(data).mark_point()
.interactive()
alt.Chart(data).transform_filter(
alt.datum.sales > 100)
alt.Chart(data).mark_point().encode(
x = "age : Q",
y = "salary : Q",
color = alt.condition(brush,
"dept : N", alt.value("lightgray")))
.add_selection(brush)
alt.data_transformers
.disable_max_rows()
alt.selection_interval()
🔥anndata🔥
anndata.AnnData(
X = X, obs = obs, var = var)
anndata = Annotated Data Matrix
🔥ansible🔥
module = ansible.module_utils.basic
.AnsibleModule()
module.exit_json()
module.params['name']
🔥anyio🔥
async with
anyio.create_task_group() as tg
anyio.create_task_group()
anyio.fail_after(2)
anyio.move_on_after(2)
anyio.open_file("test.txt", "w")
anyio.run(main)
anyio.sleep(2)
anyio.to_thread.run_sync(blocking)
tg.start_soon()
🔥antigravity🔥
antigravity.__file__
🔥apiclient🔥
media = googleapiclient.http
.MediaFileUpload("video.mp4",
resumable = True)
request = youtube.videos().insert(
part = "snippet,status",
body = {
"snippet": {"title": "Test Video"},
"status": {"privacyStatus": "private"}
},
media_body = media
)
request.execute()
google.oauth2.credentials
.Credentials
.from_authorized_user_file()
googleapiclient.discovery.build()
.search().list().execute()
🔥app.models🔥
app.models.User.objects.all()
app.models.User.objects.create()
🔥apscheduler🔥
from apscheduler.schedulers
.asyncio import
AsyncIOScheduler
from apscheduler.events import
EVENT_JOB_EXECUTED,
EVENT_JOB_ERROR
from apscheduler.jobstores
.sqlalchemy import
SQLAlchemyJobStore
from apscheduler.schedulers
.background import
BackgroundScheduler
job = scheduler.add_job(
my_task, 'interval', seconds = 10)
scheduler = BackgroundScheduler()
job.pause()
job.remove()
job.resume()
scheduler.add_listener(listener,
EVENT_JOB_EXECUTED
| EVENT_JOB_ERROR)
scheduler.shutdown()
scheduler.start()
🔥argparse🔥
argparse.ArgumentParser
🔥arrow🔥
now = arrow.now()
arrow.get(2025, 2, 16)
arrow.utcnow()
arrow.utcnow().to("Asia/Kolkata")
now.ceil('hour')
now.floor('hour')
now.format(
"YYYY-MM-DD HH:mm:ss")
now.replace(hour = 9, minute = 0)
now.shift(days = 5)
now.shift(hours = -2).humanize()
now.span('day')
now.timestamp()
🔥asgiref🔥
asgiref.local.Local
asgiref.timeout.timeout
asgiref.sync.sync_to_async
asgiref = Asynchronous Server
Gateway Interface Reference
🔥asyncpg🔥
conn = await asyncpg.connect(
user = 'postgres',
password = 'password',
database = 'testdb',
host = '127.0.0.1'
)
pool = await asyncpg.create_pool(
user = 'postgres',
password = 'password',
database = 'testdb',
host = 'localhost',
min_size = 1,
max_size = 10
)
stmt = conn.prepare("SELECT *
FROM users WHERE age > $1")
asyncpg.create_pool(
min_size = 5,
max_size = 50,
max_queries = 50000,
max_inactive_connection
_lifetime = 300)
conn.add_listener(
'user_channel', lambda *args:
print("Notification:", args))
conn.close()
conn.copy_records_to_table(
'users',
records = [
(1, 'Rahim'),
(2, 'Karim'),
(3, 'Salam')
],
columns = ['id', 'name']
)
conn.execute("INSERT INTO
users(name, age)
VALUES($1, $2)", "Rahim", 25)
conn.fetch("SELECT id,
name FROM users")
conn.fetchrow("SELECT * FROM
users WHERE id = $1", 1)
conn.prepare("SELECT *
FROM users WHERE age > $1")
conn.set_type_codec(
'jsonb',
encoder = json.dumps,
decoder = json.loads,
schema = 'pg_catalog')
conn.transaction()
pool.acquire()
pool.close()
stmt.fetch(18)
🔥attrs🔥
@attr.s
attr.asdict()
attr.evolve()
attr.ib()
attr.validators.gt()
attr.validators.instance_of()
🔥auth🔥
auth.generate_token
auth.token_required
🔥autograd🔥
autograd.grad
autograd.hessian
autograd.jacobian
autograd.numpy.array([2.0, 3.0])
🔥autokeras🔥
clf = autokeras.ImageClassifier(
max_trials = 3)
clf.evaluate(x_test, y_test)
clf.fit(x_train, y_train, epochs = 3)
🔥authlib🔥
from authlib.integrations
.django_oauth2
import AuthorizationServer
from authlib.oauth2 import grants
app = flask.Flask(__name__)
authlib.jose.jwt.decode()
authlib.jose.jwt.encode()
authlib.integrations.flask_client
.OAuth(app).register()
.authorize_access_token()
grants.ResourceOwnerPasswordCredentialsGrant
🔥awkward🔥
import awkward as ak
arr = ak.Array([[1, 2], [3, 4, 5]])
sequences = ak.Array(
[[1, 2, 3], [4, 5], [6]])
ak.flatten(sequences)
ak.mean(arr)
ak.sum(arr)
ak.to_list(arr)
ak.to_numpy(arr)
ak.to_parquet(arr, "data.parquet")
arr.layout
🔥backoff🔥
backoff.constant
backoff.expo
backoff.full_jitter
@backoff.on_exception()
backoff.on_predicate(backoff.expo)
🔥bcrypt🔥
bcrypt.checkpw()
bcrypt.gensalt()
bcrypt.hashpw()
🔥Bio🔥
bio = Bio.SeqIO.read("example.gb",
"genbank")
bio.id
bio.description
bio.features[0]
Bio.Seq.Seq
Bio.SeqUtils.gc_fraction
🔥carbon🔥
date1 = carbon.Carbon(2025, 1, 1)
date2 = carbon.Carbon(2025, 1, 10)
now = carbon.Carbon.now()
past = now.sub_days(2)
now("Asia/Kolkata")
now.add_days(5)
now.format("Y-m-d H:i:s")
now.sub_hours(3)
date1.is_before(date2)
date2.is_after(date1)
past.diff_for_humans()
🔥cgi🔥
cgi.FieldStorage()
🔥cgitb🔥
cgitb.enable(logdir = "/tmp")
🔥cmath🔥
abs(1 + 1j)
cmath.phase(1 + 1j)
cmath.sqrt()
cmath.exp(1 + 1j)
cmath.log(1 + 1j)
cmath.log10(1 + 1j)
🔥configparser🔥
config = configparser.ConfigParser()
config["Database']["host"]
config.get(section, option)
config.getint("Database", "port")
config.getfloat()
config.getboolean("Settings", "debug")
config.has_section(section)
config.options(section)
config.read('config.ini')
config.remove_option(
section, option)
config.remove_section(section)
config.sections()
config.write(configfile)
🔥ctypes🔥
ctypes._aix
ctypes._endian
ctypes.macholib
ctypes.macholib.dyld
ctypes.macholib.dylib
ctypes.macholib.framework
ctypes.util
ctypes.wintypes
🔥database🔥
database.UserDB().get_users()
🔥dbm🔥
dbm.dumb
dbm.gnu
dbm.ndbm
dbm.sqlite3
🔥django🔥
from django.contrib import admin
from django.core.asgi import
get_asgi_application
from django.db import models
admin.site.register(User)
django.http.HttpResponse(
"Hello Django Logging!")
django.http.JsonResponse
get_asgi_application()
models.CASCADE
models.CharField()
models.ForeignKey()
models.IntegerField()
models.Model
models.TextField()
🔥distutils🔥
distutils.core.setup
🔥docker🔥
client = docker.from_env()
container = client.containers.run(
"nginx",
detach = True,
ports = {'80/tcp': 8080}
)
containers = client.containers.list(
all = True)
client.images.build()
client.images.pull()
client.networks.create("mynet")
container.exec_run()
container.kill()
container.logs().decode()
container.stop()
container.remove()
container.restart()
🔥flask🔥
app = flask.Flask(__name__)
app.logger.addHandler(handler)
app.logger.info("Home page visited")
app.logger.exception("Division
error in home route")
app.logger.setLevel(logging.INFO)
app.route("/")
app.run(debug = True)
app.secret_key
flask.jsonify()
flask.redirect
flask.request.headers.get
flask.request.json
flask.url_for
🔥graphviz🔥
dot = graphviz.Digraph()
dot.attr()
dot.edge()
dot.node()
dot.render()
graphviz.Graph()
graphviz.Graph().edge()
🔥http.client🔥
con = http.client.HTTPSConnection()
response = con.getresponse()
con.close()
con.request()
response.read()
response.read().decode()
response.reason
response.status
🔥itsdangerous🔥
import itsdangerous as i
i.URLSafeTimedSerializer()
i.URLSafeTimedSerializer().dumps()
🔥jwt🔥
jwt.decode()
🔥ldap3🔥
ldap3.ALL
ldap3.Connection.bind()
ldap3.Connection.bound
ldap3.Connection.entries
ldap3.Server
🔥lib2to3🔥
lib2to3.refactor.RefactoringTool
lib2to3.refactor
.get_fixers_from_package
lib2to3.refactor.refactor_file()
lib2to3.refactor.refactor_string()
🔥line_profiler🔥
lp = line_profiler.LineProfiler()
lp.add_function(work)
lp.run('work()')
lp.print_stats()
🔥machine🔥
machine.Pin
machine.Pin.OUT
🔥magic🔥
mag = magic.Magic(mime = True)
file = mag.from_file("exam.txt")
file.startswith("text")
🔥marshal🔥
marshal.dump (value, file)
marshal.dumps (value)
marshal.load (file)
marshal.loads (bytes)
🔥matplotlib🔥
ani = matplotlib.animation
data = numpy.random.rand(5, 5)
plt = matplotlib.pyplot
x = [1, 2, 3, 4, 5]
y = [2, 4, 6, 8, 10]
ani.FuncAnimation
plt.axis("off")
plt.cla()
plt.clf()
plt.colorbar()
plt.contourf(Z)
plt.figure(figsize = (8, 4))
plt.figure().canvas.mpl_connect()
plt.gca()
plt.gca().twinx()
plt.gcf()
plt.grid(True)
plt.hist(x, bins = 4)
plt.imread("image.png")
plt.imshow(data)
plt.ioff()
plt.ion()
plt.legend()
plt.loglog(x, y)
plt.pause(0.3)
plt.pie()
plt.plot(x, y)
plt.savefig()
plt.scatter(x, y)
plt.semilogy(x, y)
plt.show()
plt.style.use('dark_background')
plt.subplot(1, 2, 1)
plt.subplots().plot(x, y)
plt.subplots().set_title("OOP Style")
plt.title("Simple Line Graph")
plt.use("Agg")
plt.xlabel("X Axis")
plt.ylabel("Y Axis")
🔥model🔥
model.parameters()
🔥models🔥
models.Base.metadata
🔥.models🔥
.models.OAuth2Client,
.models.OAuth2Token
.models.User.objects.create()
.models.User.objects.values()
🔥netrc🔥
auth = netrc.netrc()
auth.authenticators("ftp.exam.com")
netrc.NetrcParseError
🔥network🔥
wifi =
network.WLAN(network.STA_IF)
wifi.active(True)
wifi.connect("Your_SSID",
"Your_PASSWORD")
wifi.isconnected()
🔥nis🔥
nis.cat("passwd.byname")
nis.cat("passwd.byname").keys()
nis.cat("passwd.byname").items()
nis.error
nis.get_default_domain()
nis.get_default_domain().maps()
nis.match("root", "passwd.byname")
.decode().split(":")[0]
🔥nntplib🔥
server = nntplib.NNTP('exem.com')
server2 =
nntplib.NNTP_SSL('exem.com')
server.article()
server.body()
server.capabilities()
server.getwelcome()
server.group()
server.head()
server.list()
server.newnews()
server._shortcmd('DATE')
server.xhdr('subject', '1-10')
nntplib.NNTPPermanentError
nntplib.NNTPTemporaryError
server2.article()
server2.quit()
🔥numpy🔥
import numpy as np
a = np.array([[1, 2], [3, 4]])
b = np.array([[5, 6], [7, 8]])
a.dtype
a.shape
a.size
np.arange(1, 10)
np.array([1, 2, 3, 4])
np.dot(a, b)
np.linalg.det(a)
np.linalg.eig(a)
np.linalg.inv(a)
np.linspace(0, 1, 5)
np.max(a)
np.mean(a)
np.memmap("exmp.com",
dtype = 'float32', mode = 'r')
np.min(a)
np.ones(5)
np.percentile(a, 75)
np.random.normal(0, 1, 100000)
np.random.rand(5, 3)
np.random.randint(1, 10, size = 5)
np.sin(2 * np.pi * t)
np.std(a)
np.sqrt(a)
np.sum(a)
np.var(a)
np.zeros(5)
🔥pandas🔥
pandas.concat(
objs, axis = 0, join = "outer",
ignore_index = False)
pandas.DataFrame(data)
pandas.date_range
pandas.merge(df1, df2, on = 'ID')
pandas.read_csv("data.csv")
pandas.read_excel("data.xlsx")
pandas.read_json("data.json")
pandas.read_sql_query
pandas.Series([10, 20, 30])
🔥pendulum🔥
dt = pendulum.now("Asia/Kolkata")
dt.subtract(days = 5)
.diff_for_humans()
🔥PIL🔥
draw = PIL.ImageDraw.Draw(img)
img = PIL.Image.new(
"RGB", (W, H), "skyblue")
draw.ellipse([230, 180, 530, 420],
fill = "#2E8B57")
draw.polygon([(600, 240),
(630, 230), (600, 220)],
fill = "yellow")
draw.rectangle([350, 350, 430,
520], fill = "#8B5A2B")
PIL.Image
PIL.Image.new("RGB", (W, H),
"skyblue")
PIL.Image.open(image_path)
PIL.ImageDraw
PIL.ImageDraw.Draw(img)
PIL.ImageFilter
img.filter(ImageFilter
.GaussianBlur(5))
🔥plistlib🔥
plistlib.dump(data, f)
plistlib.dumps(data).decode()
plistlib.load(f)
plistlib.loads(plist_string)
plistlib.FMT_BINARY
plistlib.FMT_XML
plistlib.InvalidFileException
🔥poplib🔥
pl = poplib.POP3_SSL(
'pop.gmail.com', 995)
pl.dele(i)
pl.pass_('app_password')
pl.quit()
pl.retr(5)
pl.top(i, 0)
pl.set_debuglevel(1)
pl.stat()
pl.user('your_email@gmail.com')
poplib.POP3()
poplib.POP3_SSL()
🔥psycopg2🔥
psycopg2.connect("dbname = test
user = postgres")
psycopg2.OperationalError
🔥pty🔥
pty.fork()
pty.openpty()
pty.spawn()
🔥py_compile🔥
py_compile.compile()
py_compile.PyCompileError
🔥pytz🔥
india=pytz.timezone("Asia/Kolkata")
now = datetime.datetime.now
utc_time = now(pytz.utc)
india.localize(naive_time)
pytz.all_timezones
pytz.timezone
utc_time.astimezone(india)
🔥readline🔥
readline.add_history("hello")
readline.get_current_history_length()
readline.get_endidx()
readline.get_history_item(i + 1)
readline.get_line_buffer()
readline.parse_and_bind()
readline.read_history_file()
readline.set_completer()
readline.set_history_length(100)
readline.write_history_file
🔥requests🔥
requests.exceptions
.RequestException
requests.get()
requests.get().json()
requests.get().status_code
🔥resource🔥
usage = resource.getrusage(
resource.RUSAGE_SELF)
usage.ru_maxrss
usage.ru_stime
usage.ru_utime
resource.getrusage()
resource.setrlimit()
resource.RLIMIT_AS
resource.RLIMIT_CORE
resource.RLIMIT_CPU
resource.RLIMIT_DATA
resource.RLIMIT_NOFILE
resource.RLIMIT_NPROC
resource.RLIMIT_STACK
resource.RUSAGE_CHILDREN
🔥rest_framework
.authtoken.models🔥
from rest_framework.authtoken
.models import Token
Token.objects.get_or_create()
Token.objects.get_or_create().key
🔥rlcompleter🔥
rlcompleter.Completer().complete()
🔥RPi.GPIO🔥
import RPi.GPIO as GPIO
BUTTON_PIN, LED_PIN = 17, 18
TRIG, ECHO = 23, 24
GPIO.cleanup()
GPIO.input(ECHO)
GPIO.output(LED_PIN, GPIO.HIGH)
GPIO.output(LED_PIN, GPIO.LOW)
GPIO.output(TRIG, False)
GPIO.output(TRIG, True)
GPIO.setmode(GPIO.BCM)
GPIO.setmode(GPIO.BOARD)
GPIO.setup(ECHO, GPIO.IN)
GPIO.setup(TRIG, GPIO.OUT)
GPIO.setup(BUTTON_PIN, GPIO.IN,
pull_up_down = GPIO.PUD_UP)
🔥runpy🔥
runpy.run_module()
runpy.run_path()
runpy.run_path().items()
🔥setuptools🔥
setuptools.find_packages
setuptools.setup
🔥shutil🔥
shutil.copy()
shutil.copy2()
shutil.copyfileobj()
shutil.copytree()
shutil.disk_usage()
shutil.ignore_patterns()
shutil.make_archive()
shutil.move()
shutil.rmtree()
shutil.unpack_archive()
shutil.which()
🔥sklearn🔥
model = sklearn.linear_model
.ElasticNet(alpha = 0.1,
l1_ratio = 0.5)
model = sklearn.linear_model
.HuberRegressor()
model = sklearn.linear_model
.Lasso(alpha = 0.1)
model = sklearn.linear_model
.LinearRegression()
model = sklearn.linear_model
.LogisticRegression()
model = sklearn.linear_model
.Ridge(alpha = 1.0)
model = sklearn.linear_model
.RANSACRegressor()
model = sklearn.linear_model
.SGDClassifier(loss = "log_loss")
model.fit(X, y)
model.partial_fit(X_batch, y_batch)
model.predict([[5]])
sklearn.model_selection
.GridSearchCV().fit()
sklearn.pipeline.Pipeline
🔥sqlalchemy🔥
sqlalchemy.Column
sqlalchemy.Integer
sqlalchemy.String
sqlalchemy.ext.declarative
.declarative_base
🔥ssl🔥
context = ssl.create_default_context()
context.check_hostname
context.verify_mode
context.wrap_socket()
context.wrap_socket().connect()
ssl.CERT_NONE
🔥stat🔥
mode = os.stat("data.txt").st_mode
m1 = os.stat("image.jpg").st_mode
m2 = os.stat("/dev/ttyUSB0")
.st_mode
stat.filemode(m1)
stat.S_ISCHR(m2)
stat.S_ISDIR(mode)
stat.S_ISLNK(mode)
stat.S_ISREG(mode)
stat.S_ISUID
stat.S_IRGRP
stat.S_IRUSR
stat.S_IWOTH
stat.S_IWUSR
stat.S_IXUSR
🔥syslog🔥
syslog.closelog()
syslog.openlog("MyPythonApp")
syslog.syslog("System started")
syslog.LOG_ALERT
syslog.LOG_AUTH
syslog.LOG_CRIT
syslog.LOG_CRON
syslog.LOG_DAEMON
syslog.LOG_DEBUG
syslog.LOG_ERR
syslog.LOG_INFO
syslog.LOG_PID
syslog.LOG_USER
syslog.LOG_WARNING
🔥telnetlib🔥
telnetlib.Telnet(host, port)
🔥tensorflow🔥
tensorflow.keras.datasets.mnist
.load_data()
🔥termios🔥
termios.tcgetattr(fd)
termios.tcsetattr(fd, when,
attributes)
termios.tcflush(fd, queue)
termios.tcdrain(fd)
termios.TCSANOW
termios.TCSADRAIN
termios.TCSAFLUSH
termios.tcgetattr(sys.stdin.fileno())
termios.tcsetattr(sys.stdin.fileno(),
termios.TCSADRAIN, termios
.tcgetattr(sys.stdin.fileno()))
termios.tcsetattr(
os.open("/dev/ttyS0",
os.O_RDWR | os.O_NOCTTY),
termios.TCSANOW, termios
.tcgetattr(os.open("/dev/ttyS0",
os.O_RDWR | os.O_NOCTTY)))
🔥tkinter🔥
tkinter.PhotoImage
tkinter.TclError
🔥this🔥
this.s
🔥torch🔥
import numpy as np
a = np.array([1, 2, 3])
device = torch.device("cuda" if
torch.cuda.is_available()
else "cpu")
scaler=torch.cuda.amp.GradScaler()
state = torch.tensor(env_state)
x = torch.tensor(
2.0, requires_grad = True)
policy_net(state).argmax()
scaler.scale(loss).backward()
scaler.step(optimizer)
scaler.update()
torch.autograd.Function
torch.cuda.amp.autocast,
torch.cuda.amp.GradScaler
torch.distributed
.init_process_group("nccl")
torch.from_numpy(a)
torch.jit.script(model)
torch.jit.script(model)
.save("model.pt")
torch.nn.Linear(10, 1)
torch.nn.Module
torch.nn.MSELoss()
torch.randn(3, 3).to(device)
torch.optim.Adam(
model.parameters(), lr = 0.001)
torch.quantization.quantize_dynamic
torch.qint8
torch.utils.data.DataLoader
torch.utils.data.TensorDataset
torch.tensor(a)
x.grad
y.backward()
🔥tty🔥
tty.setcbreak(sys.stdin.fileno())
tty.setraw(sys.stdin.fileno())
🔥urllib🔥
opener=urllib.request.build_opener()
opener.addheaders = [
("User-Agent", "Mozilla/5.0"),
("Accept", "text/html")
]
response = opener.open("https://
www.python.org")
response.read().decode()
*
params = {'q': 'Python', 'page': 1}
qp = urllib.parse.urlencode(params)
response = urllib.request.urlopen(url)
response = urllib.request
.urlopen(url, timeout = 5)
url = "https://www.exam.com"
url = f"https://www.exam.com
/search?{qp}"
urllib.error.HTTPError
urllib.error.URLError
urllib.parse.parse_qs
response.read().decode('utf-8')
🔥webbrowser🔥
webbrowser.BackgroundBrowser
webbrowser.get().open
webbrowser.open
webbrowser.open_new
webbrowser.open_new_tab
webbrowser.register
🔥werkzeug.security🔥
werkzeug.security
.generate_password_hash
werkzeug.security
.check_password_hash
🔥winreg🔥
winreg.CloseKey()
winreg.CreateKey()
winreg.DeleteKey()
winreg.DeleteValue()
winreg.OpenKey()
winreg.QueryValueEx()
winreg.SetValueEx()
winreg.HKEY_CLASSES_ROOT
winreg.HKEY_CURRENT_CONFIG
winreg.HKEY_CURRENT_USER
winreg.HKEY_LOCAL_MACHINE
winreg.KEY_SET_VALUE
winreg.REG_SZ
🔥winsound🔥
winsound.Beep(1000, 500)
winsound.MessageBeep()
winsound.PlaySound()
winsound.SND_ASYNC
winsound.SND_FILENAME
winsound.SND_LOOP
winsound.SND_MEMORY
winsound.SND_PURGE
🔥xml🔥
xml.dom
xml.dom.NodeFilter
xml.dom.domreg
xml.dom.expatbuilder
xml.dom.minicompat
xml.dom.minidom
xml.dom.pulldom
xml.dom.xmlbuilder
xml.etree
xml.etree.ElementInclude
xml.etree.ElementPath
xml.etree.ElementTree
xml.etree.cElementTree
xml.parsers
xml.parsers.expat
xml.sax
xml.sax._exceptions
xml.sax.expatreader
xml.sax.handler
xml.sax.saxutils
xml.sax.xmlreader
🔥xmlrpc🔥
xmlrpc
xmlrpc.client
xmlrpc.server
***
#📢শেয়ারচ্যাট স্পেশাল