
TEAM_𝐑𝐂4 ࿕


Do you want an explanation of how to bypass SSL in most apps, including on mobile?
A YouTube API I came across by chance while playing around with the site (it only shows basic info, but it is useful for a few things). It returns: 1 - the thumbnail, 2 - the video title, 3 - a link to the uploader's channel.
Returns it as JSON:
https://www.youtube.com/oembed?format=json&url=YOUR_VIDEO_URL
_______________
Returns it as XML:
https://www.youtube.com/oembed?format=xml&url=YOUR_VIDEO_URL
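A minimal sketch of calling the JSON variant from Python (assumes only the requests package; the video URL below is just a placeholder):
import requests

video_url = "https://www.youtube.com/watch?v=dQw4w9WgXcQ"  # placeholder video
resp = requests.get(
    "https://www.youtube.com/oembed",
    params={"format": "json", "url": video_url},
)
info = resp.json()
print(info["title"])          # video title
print(info["thumbnail_url"])  # thumbnail
print(info["author_url"])     # uploader's channel link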
@VIP3GL , @Python_Haider
A request that shows you your subscription details (Earthlink only)
import requests

url = "https://ubapi.earthlink.iq/api/user/GetUserDataAr"

headers = {
  'User-Agent': "Mozilla/5.0 (Linux; Android 10; K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Mobile Safari/537.36",
  'sec-ch-ua': "\"Google Chrome\";v=\"113\", \"Chromium\";v=\"113\", \"Not-A.Brand\";v=\"24\"",
  'dnt': "1",
  'sec-ch-ua-mobile': "?1",
  'authorization': "undefined undefined",
  'sec-ch-ua-platform': "\"Android\"",
  'origin': "https://user.earthlink.iq",
  'sec-fetch-site': "same-site",
  'sec-fetch-mode': "cors",
  'sec-fetch-dest': "empty",
  'referer': "https://user.earthlink.iq/",
  'accept-language': "ar,en-US;q=0.9,en;q=0.8"
}

response = requests.get(url, headers=headers)

print(response.text)
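If the endpoint returns JSON (an assumption; the script above just dumps the raw body), pretty-printing it makes the account fields easier to read:
import json

try:
    # Assumes the body is JSON; fall back to the raw text if it is not.
    print(json.dumps(response.json(), ensure_ascii=False, indent=2))
except ValueError:
    print(response.text)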
Sources: @VIP3GL , @Python_Haider
Download from Facebook
import requests
from bs4 import BeautifulSoup
import re
import json
import random
import string
from fake_useragent import UserAgent
import os

urlf = input("Enter Your Url : ")
os.system('clear')

def generate_random_cookies():
    return {
        '_ga': ''.join(random.choices(string.ascii_uppercase + string.digits, k=16)),
        '_ga_GF233DD9BC': ''.join(random.choices(string.ascii_uppercase + string.digits, k=16)),
    }

def get_random_user_agent():
    ua = UserAgent()
    return ua.random

url = "https://fbdownloader.to/en"
html_content = requests.get(url).text

k_exp = re.search(r'k_exp\s*=\s*"([^"]+)"', html_content)
k_token = re.search(r'k_token\s*=\s*"([^"]+)"', html_content)
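# Hedged guard: if fbdownloader.to changes its page markup, these regexes may
# not match and k_exp/k_token will be None; stop early instead of hitting an
# AttributeError on .group(1) below.
if not k_exp or not k_token:
    raise SystemExit("Could not extract k_exp/k_token from the page")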

cookies = generate_random_cookies()

headers = {
    'authority': 'fbdownloader.to',
    'accept': '*/*',
    'accept-language': 'en-US,en;q=0.9,ar-EG;q=0.8,ar;q=0.7',
    'content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'origin': 'https://fbdownloader.to',
    'referer': url,
    'sec-ch-ua': '"Not-A.Brand";v="99", "Chromium";v="124"',
    'sec-ch-ua-mobile': '?1',
    'sec-ch-ua-platform': '"Android"',
    'sec-fetch-dest': 'empty',
    'sec-fetch-mode': 'cors',
    'sec-fetch-site': 'same-origin',
    'user-agent': get_random_user_agent(),
    'x-requested-with': 'XMLHttpRequest',
}

data = {
    'k_exp': k_exp.group(1),
    'k_token': k_token.group(1),
    'q': urlf,
    'lang': 'en',
    'v': 'v2',
    'w': '',
}

response = requests.post('https://fbdownloader.to/api/ajaxSearch', cookies=cookies, headers=headers, data=data)

parsed_data = json.loads(response.text)

html_content = parsed_data['data']

soup = BeautifulSoup(html_content, 'html.parser')

table_mp4 = soup.select_one('.tab__content table:nth-of-type(1)')
if table_mp4:
    highest_quality_row = table_mp4.select_one('tbody tr:first-child')
    if highest_quality_row:
        quality = highest_quality_row.select_one('.video-quality').text.strip()
        download_link = highest_quality_row.select_one('.download-link-fb')['href']
        print(f"Quality: {quality}")
        print(f"Download Link: {download_link}")
Sources: @VIP3GL , @Python_Haider
Download from Instagram
import requests
from fake_useragent import UserAgent
import random
import string
import os
link = input("Enter Your Url : ")
os.system("clear")
url = "https://www.save-free.com/process"

payload = {
"instagram_url":link,
"type":"media",
"resource":"save"
}

def random_cookies():
    cookie_name = ''.join(random.choices(string.ascii_letters + string.digits, k=10))
    cookie_value = ''.join(random.choices(string.ascii_letters + string.digits, k=20))
    return f"{cookie_name}={cookie_value};"

user_agent = UserAgent().random

headers = {
    'User-Agent': user_agent,
    'Accept': "text/html, */*; q=0.01",
    'Content-Type': "application/x-www-form-urlencoded",
    'sec-ch-ua': "\"Google Chrome\";v=\"113\", \"Chromium\";v=\"113\", \"Not-A.Brand\";v=\"24\"",
    'dnt': "1",
    'sec-ch-ua-mobile': "?1",
    'x-requested-with': "XMLHttpRequest",
    'x-valy-cache': "accpted",
    'sec-ch-ua-platform': "\"Android\"",
    'origin': "https://www.save-free.com",
    'sec-fetch-site': "same-origin",
    'sec-fetch-mode': "cors",
    'sec-fetch-dest': "empty",
    'referer': "https://www.save-free.com/ar/",
    'accept-language': "ar,en-US;q=0.9,en;q=0.8",
}

headers['Cookie'] = random_cookies()

response = requests.post(url, data=payload, headers=headers)

if response.status_code == 200:
    try:
        download_url = response.json()[0]['url'][0]['urlDownloadable']
        print(download_url)
    except (IndexError, KeyError, ValueError) as e:
        print(f"Failed response JSON: {e}")
else:
    print(f"Request failed with status code: {response.status_code}")
Sources: @VIP3GL , @Python_Haider
Download from TikTok without a watermark
import requests
from bs4 import BeautifulSoup
import json
from fake_useragent import UserAgent
import os

url = "https://savetik.co/api/ajaxSearch"
link = input("Enter Your Link : ")
os.system("clear")
payload = f"q={link}&lang=en"

session = requests.Session()

user_agent = UserAgent().random

headers = {
    'User-Agent': user_agent,
    'Content-Type': "application/x-www-form-urlencoded",
    'sec-ch-ua': "\"Google Chrome\";v=\"113\", \"Chromium\";v=\"113\", \"Not-A.Brand\";v=\"24\"",
    'dnt': "1",
    'sec-ch-ua-mobile': "?1",
    'x-requested-with': "XMLHttpRequest",
    'sec-ch-ua-platform': "\"Android\"",
    'origin': "https://savetik.co",
    'sec-fetch-site': "same-origin",
    'sec-fetch-mode': "cors",
    'sec-fetch-dest': "empty",
    'referer': "https://savetik.co/en2",
    'accept-language': "ar,en-US;q=0.9,en;q=0.8"
}

response = session.post(url, data=payload, headers=headers)

if response.status_code == 200:
    try:
        data = response.json()

        soup = BeautifulSoup(data['data'], 'html.parser')
        links = soup.find_all('a', class_='tik-button-dl')
        if len(links) >= 2:
            link = links[1]['href']
            print(link)
        else:
            print("Error: No valid download link found.")
    except json.JSONDecodeError as e:
        print(f"Error decoding JSON: {e}")
else:
    print(f"Request failed with status code {response.status_code}")
Sources: @VIP3GL , @Python_Haider
A script to create an account on the freerdp site
import requests
from bs4 import BeautifulSoup
from faker import Faker

fake = Faker()

ttoken = input("Enter Your Token : ")
tid = input("Enter Your Id : ")
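# The bot token and chat id are only used at the end to send the generated
# credentials to your own Telegram chat.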

url_signup = "https://freerdp.one/user/auth/signup"

user_agent = fake.user_agent()

payload = {
    'fname': fake.first_name(),
    'lname': fake.last_name(),
    'email': fake.email(),
    'password': fake.password(length=12),
    'check': 'on',
    'signup': ''
}

headers = {
    'User-Agent': user_agent,
    'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    'Content-Type': "application/x-www-form-urlencoded",
    'cache-control': "max-age=0",
    'sec-ch-ua': "\"Google Chrome\";v=\"113\", \"Chromium\";v=\"113\", \"Not-A.Brand\";v=\"24\"",
    'sec-ch-ua-mobile': "?1",
    'sec-ch-ua-platform': "\"Android\"",
    'origin': "https://freerdp.one",
    'dnt': "1",
    'upgrade-insecure-requests': "1",
    'sec-fetch-site': "same-origin",
    'sec-fetch-mode': "navigate",
    'sec-fetch-user': "?1",
    'sec-fetch-dest': "document",
    'referer': "https://freerdp.one/user/auth/signup",
    'accept-language': "en-US,en;q=0.9,ar-EG;q=0.8,ar;q=0.7"
}

session = requests.Session()

response_signup = session.post(url_signup, data=payload, headers=headers)

if response_signup.status_code == 200:
    try:
        soup_signup = BeautifulSoup(response_signup.content, 'html.parser')

        signup_title_elem = soup_signup.find('h4', class_='card-title text-center')
        if signup_title_elem:
            signup_title = signup_title_elem.text.strip()
        else:
            signup_title = "Signup Title Not Found"

        signup_home_link_elem = soup_signup.find('a', href='https://freerdp.one/')
        if signup_home_link_elem:
            signup_home_link = signup_home_link_elem['href']
        else:
            signup_home_link = "https://freerdp.one/ (Home Link Not Found)"

        telegram_message = f"New Signup Details:\n" \
                           f"First Name: {payload['fname']}\n" \
                           f"Last Name: {payload['lname']}\n" \
                           f"Email: {payload['email']}\n" \
                           f"Password: {payload['password']}\n" \
                           f"Signup Title: {signup_title}\n" \
                           f"Signup Home Link: {signup_home_link}"

        telegram_api_url = f"https://api.telegram.org/bot{ttoken}/sendMessage"

        response_telegram = requests.post(telegram_api_url, data={'chat_id': tid, 'text': telegram_message})

        if response_telegram.status_code == 200:
            print("Signup successful! Message sent to Telegram.")
        else:
            print(f"Failed to send message to Telegram. Status code: {response_telegram.status_code}")

    except AttributeError as e:
        print(f"Error finding elements in signup page: {e}")

else:
    print(f"Signup failed with status code: {response_signup.status_code}")
Sources: @VIP3GL , @Python_Haider ---------- Requested by one of the followers. 🌹 (a site I'm not familiar with)
A PyPI password reset script
import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
import os
ua = UserAgent()
random_user_agent = ua.random

mail = input("Enter Your Mail or User : ")
os.system("clear")
session = requests.Session()

session.headers.update({
    'User-Agent': random_user_agent,
    'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    'Content-Type': "application/x-www-form-urlencoded",
    'cache-control': "max-age=0",
    'sec-ch-ua': "\"Google Chrome\";v=\"113\", \"Chromium\";v=\"113\", \"Not-A.Brand\";v=\"24\"",
    'sec-ch-ua-mobile': "?1",
    'sec-ch-ua-platform': "\"Android\"",
    'origin': "https://pypi.org",
    'dnt': "1",
    'upgrade-insecure-requests': "1",
    'sec-fetch-site': "same-origin",
    'sec-fetch-mode': "navigate",
    'sec-fetch-user': "?1",
    'sec-fetch-dest': "document",
    'referer': "https://pypi.org/account/request-password-reset/",
    'accept-language': "en-US,en;q=0.9,ar-EG;q=0.8,ar;q=0.7"
})

response = session.get("https://pypi.org/account/request-password-reset/")
html_content = response.text
soup = BeautifulSoup(html_content, 'html.parser')
csrf_input = soup.find('input', {'name': 'csrf_token'})
csrf_token = csrf_input.get('value') if csrf_input else None
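# Bail out with a clear message if PyPI changed its form markup and the
# CSRF field could not be found.
if not csrf_token:
    raise SystemExit("csrf_token not found on the reset page")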

url = "https://pypi.org/account/request-password-reset/"
payload = {
    'csrf_token': csrf_token,
    'username_or_email': mail
}

response = session.post(url, data=payload)

soup = BeautifulSoup(response.text, 'html.parser')

res = soup.find('p').get_text()

print(res)
Sources: @VIP3GL , @Python_Haider
import geocoder
import flag
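# Look up the country code of the machine's public IP with geocoder, then
# convert it into an emoji flag with the flag package.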
country_code = geocoder.ip('me').country
country_flag = flag.flag(country_code)
print(country_flag)
A Python script useful for tool developers: it gets you the flag of your own country. It was requested from me. Sources: @VIP3GL , @Python_Haider
import arabic_reshaper

def haider(text):
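    # Reshape the Arabic letters into their joined presentation forms, then
    # reverse the string so it displays in apps that have no RTL support.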
    text = arabic_reshaper.reshape(text)
    text = text[::-1]
    return text

text = 'حيدر'
text = haider(text)
print(text)
A simple piece of code that reverses the text; it mostly helps designers, for apps that don't support Arabic. There is something odd in the code, what is it? 😂