Example no. 1
0
def jobstreet():
    """Scrape IT job listings from jobstreet.co.id and store each record in Firebase.

    Fetches the search-result page with a randomly chosen User-Agent, pairs
    the position/company/requirement/link elements found on the page, and
    writes one record per listing under the "jobstreet" database path.
    Salary is not shown on the listing page, so it is stored as '-'.
    """
    url = "https://www.jobstreet.co.id/id/job-search/job-vacancy.php?ojs=10&key=IT"
    # Rotate the User-Agent to reduce the chance of being blocked.
    user_agent = random.choice(user_agent_list)
    headers = {'User-Agent': user_agent}
    r = requests.get(url, headers=headers, verify=True)
    soup = BeautifulSoup(r.content, "html.parser")
    position = soup.find_all("h2")
    company = soup.find_all("h3", class_='company-name')
    requirement = soup.find_all("ul", class_="list-unstyled hidden-xs")
    link = soup.find_all("a", class_="position-title-link")
    salary = '-'
    progress = [".", "..", "...", "....", "....."]
    # zip() stops at the shortest list, so a selector that matches fewer
    # elements silently drops trailing listings rather than raising.
    for pos, comp, req, anchor in zip(position, company, requirement, link):
        record_id = _serial()  # renamed from `id` to avoid shadowing the builtin
        data = {
            "id": record_id,
            "position": pos.text.strip(),
            "company": comp.text.strip(),
            "salary": salary,
            "requirement": req.text,
            "link": anchor.get("href")
        }
        time.sleep(1)  # throttle per-record writes / be polite to the site
        print(random.choice(progress))
        db.reference("jobstreet").child(record_id).update(data)
Example no. 2
0
def jooble():
    """Scrape IT job listings from the id.jooble.org mobile site and print them.

    NOTE(review): unlike the sibling scrapers, this function only prints each
    record and never calls db.reference(...).update() — confirm whether the
    Firebase write was intentionally omitted.
    """
    url = "https://id.jooble.org/m/lowongan-kerja-IT"
    # Rotate the User-Agent to reduce the chance of being blocked.
    user_agent = random.choice(user_agent_list)
    headers = {'User-Agent': user_agent}
    r = requests.get(url, headers=headers, verify=True)
    soup = BeautifulSoup(r.content, "html.parser")
    # These class names look auto-generated/hashed, so they are likely to
    # break whenever the site redeploys its front-end.
    position = soup.find_all("h2", class_='_1e859')
    company = soup.find_all("span", class_='caption _8d375')
    requirement = soup.find_all("div", class_="_0b1c1")
    salary = soup.find_all("p", class_="_6f85c")
    link = soup.find_all("a", class_="baa11")
    progress = [".", "..", "...", "....", "....."]
    # zip() stops at the shortest list, so mismatched selectors silently
    # drop trailing listings rather than raising.
    for pos, comp, req, anchor, sal in zip(position, company, requirement, link, salary):
        record_id = _serial()  # renamed from `id` to avoid shadowing the builtin
        data = {
            "id": record_id,
            "position": pos.text.strip(),
            "company": comp.text.strip(),
            "salary": sal.text,
            "requirement": req.text,
            # hrefs on the mobile site are relative, so prepend the host
            "link": "https://id.jooble.org" + anchor.get("href")
        }
        time.sleep(1)  # throttle per-record processing / be polite to the site
        print(random.choice(progress))
        print()
        print(data)
Example no. 3
0
def topkarir():
    """Scrape IT job listings from topkarir.com and store each record in Firebase.

    Fetches the search-result page with a randomly chosen User-Agent, pairs
    the position/company/requirement/link elements, prints each record, and
    writes it under the "topkarir" database path.  No salary element is
    scraped, so salary is stored as '-'.
    """
    url = "https://www.topkarir.com/lowongan?search_joblist=IT&adv_inter=&adv_lokasi=&adv_pendidikan=&group=0&adv_posisi=&adv_gaji=&adv_gaji_max=0&adv_industri=0"
    # Rotate the User-Agent to reduce the chance of being blocked.
    user_agent = random.choice(user_agent_list)
    headers = {'User-Agent': user_agent}
    r = requests.get(url, headers=headers, verify=True)
    soup = BeautifulSoup(r.content, "html.parser")
    position = soup.find_all("h3", class_='job-title')
    company = soup.find_all("h2", class_='company-title title')
    requirement = soup.find_all("div", class_="keterangan")
    link = soup.find_all("a", class_="btn-small lightblue track_alto")
    progress = [".", "..", "...", "....", "....."]
    # zip() stops at the shortest list, so mismatched selectors silently
    # drop trailing listings rather than raising.
    for pos, comp, req, anchor in zip(position, company, requirement, link):
        record_id = _serial()  # renamed from `id` to avoid shadowing the builtin
        data = {
            "id": record_id,
            "position": pos.text.strip(),
            "company": comp.text.strip(),
            "salary": '-',
            "requirement": req.text,
            # this site keeps the target URL in a data-url attribute, not href
            "link": anchor.get("data-url")
        }
        time.sleep(1)  # throttle per-record writes / be polite to the site
        print(random.choice(progress))
        print()
        print(data)
        db.reference("topkarir").child(record_id).update(data)
Example no. 4
0
def karir():
    """Scrape job listings from karir.com search results and store them in Firebase.

    Fetches the search page with a randomly chosen User-Agent, pairs the
    position/company/experience/salary/link elements, pretty-prints each
    record, and writes it under the "karir" database path.
    """
    url = "https://www.karir.com/search"
    # Rotate the User-Agent to reduce the chance of being blocked.
    user_agent = random.choice(user_agent_list)
    headers = {'User-Agent': user_agent}
    r = requests.get(url, headers=headers, verify=True)
    soup = BeautifulSoup(r.content, "html.parser")
    position = soup.find_all("h4",
                             class_='tdd-function-name --semi-bold --inherit')
    company = soup.find_all("div", class_='tdd-company-name h8 --semi-bold')
    requirement = soup.find_all("span", class_="tdd-experience")
    salary = soup.find_all("span", class_="tdd-salary")
    link = soup.find_all("a", class_="btn --full")
    progress = [".", "..", "...", "....", "....."]
    # zip() stops at the shortest list, so mismatched selectors silently
    # drop trailing listings rather than raising.
    for pos, comp, req, anchor, sal in zip(position, company, requirement, link, salary):
        record_id = _serial()  # renamed from `id` to avoid shadowing the builtin
        data = {
            "id": record_id,
            "position": pos.text.strip(),
            "company": comp.text.strip(),
            "salary": sal.text,
            "requirement": 'Pengalaman : ' + req.text,
            # hrefs are relative, so prepend the host
            "link": 'https://www.karir.com' + anchor.get("href")
        }
        time.sleep(1)  # throttle per-record writes / be polite to the site
        print(random.choice(progress))
        print()
        pprint(data)
        db.reference("karir").child(record_id).update(data)
Example no. 5
0
def loker_id():
    """Scrape IT job listings from loker.id and store each record in Firebase.

    Fetches the search-result page with a randomly chosen User-Agent, pairs
    each heading with its info table, and writes one record per listing
    under the "lokerid" database path.  Salary is not scraped, so it is
    stored as '-'.
    """
    url = "https://www.loker.id/cari-lowongan-kerja?q=IT&lokasi=0"
    # Rotate the User-Agent to reduce the chance of being blocked.
    user_agent = random.choice(user_agent_list)
    headers = {'User-Agent': user_agent}
    r = requests.get(url, headers=headers, verify=True)
    soup = BeautifulSoup(r.content, "html.parser")
    # Removed the original `data = soup.find_all("div", class_='job-box')`:
    # the result was never used — `data` was rebuilt inside the loop.
    position = soup.find_all("h3", class_="media-heading h4")
    company = soup.find_all("table", class_='table')
    salary = '-'
    progress = [".", "..", "...", "....", "....."]

    # zip() stops at the shorter list, so mismatched selectors silently
    # drop trailing listings rather than raising.
    for heading, info in zip(position, company):
        anchor = heading.find('a')  # the job link lives inside the heading
        record_id = _serial()  # renamed from `id` to avoid shadowing the builtin
        data = {
            "id": record_id,
            "position": heading.text.strip(),
            "company": info.text.strip(),
            "salary": salary,
            # NOTE(review): `requirement` stores the same table text as
            # `company` — confirm this duplication is intended.
            "requirement": info.text,
            "link": anchor.get("href")
        }
        time.sleep(1)  # throttle per-record writes / be polite to the site
        print(random.choice(progress))
        db.reference("lokerid").child(record_id).update(data)
Example no. 6
0
def popl():
    """Record a "popular" position/URL pair in Firebase, then redirect to the URL.

    Reads `position` and `url` from the submitted form, stores them under
    the "populer" database path with a fresh serial id, and issues a 302
    redirect to the submitted URL.

    WARNING(security): this redirects to a caller-supplied URL with no
    validation — an open-redirect risk.  Validate or whitelist `url`
    before relying on this endpoint.
    """
    record_id = _serial()  # renamed from `id` to avoid shadowing the builtin
    position = request.form.get('position')
    target_url = request.form.get('url')
    data = {"id": record_id, "position": position, "url": target_url}
    db.reference("populer").child(record_id).update(data)
    return redirect(target_url, code=302)