automated terminal push
15 .dockerignore Normal file
@@ -0,0 +1,15 @@
.git

.pristine

.trash

.recycle

.backup

.volumes

web/

docs/
162 .gitignore vendored Normal file
@@ -0,0 +1,162 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
BIN .recycle/test/.test.bash.swp Normal file
Binary file not shown.
27 .recycle/test/test-complex.bash Normal file
@@ -0,0 +1,27 @@
#!/bin/bash

reset

clear

set -e

set -x

# Define the API endpoint URL with root and path
API_URL="http://aventador.embanet.online:5000/complex_process"

# Data to send in JSON format
DATA='{"string": "test", "num1": 10, "num2": 20}'

# Send POST request using curl with provided data
response=$(curl -s -X POST -H "Content-Type: application/json" -d "$DATA" "$API_URL")

# Check for successful response (exit code 0) and extract the result
if [[ $? -eq 0 ]]; then
    result=$(echo "$response" | jq -r '.result')
    echo "API responded successfully with result: $result"
else
    echo "Error: API request failed!"
    exit 1
fi
27 .recycle/test/test.bash Normal file
@@ -0,0 +1,27 @@
#!/bin/bash

reset

clear

set -e

set -x

# Define the API endpoint URL with root and path
API_URL="http://aventador.embanet.online:5000/process"

# Data to send in JSON format
DATA='{"string": "test", "num1": 10, "num2": 20}'

# Send POST request using curl with provided data
response=$(curl -s -X POST -H "Content-Type: application/json" -d "$DATA" "$API_URL")

# Check for successful response (exit code 0) and extract the result
if [[ $? -eq 0 ]]; then
    result=$(echo "$response" | jq -r '.result')
    echo "API responded successfully with result: $result"
else
    echo "Error: API request failed!"
    exit 1
fi
11 Dockerfile Normal file
@@ -0,0 +1,11 @@
FROM python:3-bookworm

WORKDIR /app

COPY . .

RUN pip install --no-cache-dir -r requirements.txt

EXPOSE 5000

CMD ["flask", "run", "--host=0.0.0.0"]
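As a quick sanity check of the Dockerfile above, outside the compose stack, something like the sketch below should work. The local tag `jobs-api:local` and the 8888 host port are illustrative choices, not names from the repository; compose.yaml publishes the same container port 5000 as 8888.

```bash
#!/bin/bash
# Build the API image from the repository root and run it standalone.
# Tag and port mapping are assumptions for illustration only.
docker build -t jobs-api:local .
docker run --rm -d --name jobs-api -p 8888:5000 jobs-api:local

# Smoke-test the root route served by app.py.
curl -s http://localhost:8888/
```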
75 Jenkinsfile vendored Normal file
@@ -0,0 +1,75 @@
pipeline {

    agent none

    options {

        disableConcurrentBuilds(abortPrevious: true)

        buildDiscarder(logRotator(numToKeepStr: '1'))
    }

    stages {

        stage('docker compose build') {

            agent {

                label "aventador"

            }

            steps {

                dir('.') {

                    sh 'docker compose build'

                }

            }

        }

        stage('docker compose push') {

            agent {

                label "aventador"

            }

            steps {

                dir('.') {

                    sh 'docker compose push'

                }

            }

        }

        stage('prune') {

            agent {

                label "aventador"

            }

            steps {

                dir('.') {

                    sh 'docker system prune -a -f'

                }

            }

        }

    }
}
5 LICENSE
@@ -1,5 +0,0 @@
Copyright (C) YEAR by AUTHOR EMAIL

Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
17 app.bash Executable file
@@ -0,0 +1,17 @@
#!/bin/bash

##set -e

##set -x

reset

clear

echo "## launch > flask api"

pkill flask

flask run --host=0.0.0.0

echo "flask api started..."
36 app.py Normal file
@@ -0,0 +1,36 @@
from flask import Flask, request, jsonify
from flask_cors import CORS, cross_origin
import datetime

import uuid

from fetchjobs import writeFile, dowork, read

app = Flask(__name__)

CORS(app)

@app.route('/')
def hello():

    return 'Hello, Universe! With ❤️ from Software Shinobi (www.softwareshinobi.com)'

@app.route('/jobs/load')
def fetchJobs():

    thing = dowork()

    print("thing1: ", thing)

    thing2 = writeFile(thing)

    print("thing2: ", thing2)

    return thing

@app.route('/jobs/')
def ddsd():

    loaded = read()

    return loaded
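With a dev server running (app.bash starts `flask run` on the default port 5000), the routes above can be exercised from the shell. This is a sketch, not part of the repository: the port is the Flask dev-server default, jq is assumed to be installed, and the field names are the ones the dashboard in web/js/jobs.js reads.

```bash
#!/bin/bash
# Exercise the routes defined in app.py against a local dev server.
# The compose stack publishes the same API on host port 8888 instead of 5000.
BASE="http://localhost:5000"

curl -s "$BASE/"                       # hello route
curl -s "$BASE/jobs/load" > /dev/null  # scrape and persist a fresh report
curl -s "$BASE/jobs/" \
  | jq '.[0] | {site, company, title, job_url, location, job_type, is_remote}'
```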
11 compose.bash Executable file
@@ -0,0 +1,11 @@
reset

clear

set -e

set -x

docker compose down --remove-orphans

docker compose up --build -d
33 compose.yaml Normal file
@@ -0,0 +1,33 @@
services:

  shinobi-jobs-fetcher-server:

    container_name: shinobi-jobs-fetcher-server

    image: softwareshinobi/shinobi-jobs-fetcher-server

    build:

      context: .

      dockerfile: Dockerfile

    ports:

      - 8888:5000

  shinobi-jobs-fetcher-web:

    container_name: shinobi-jobs-fetcher-web

    image: softwareshinobi/shinobi-jobs-fetcher-web

    build:

      context: web

      dockerfile: Dockerfile

    ports:

      - 8880:80
73 fetchjobs.py Normal file
@@ -0,0 +1,73 @@
import csv

import json

from datetime import datetime

from jobspy import scrape_jobs

def dowork():

    jobs = scrape_jobs(

        site_name=["indeed", "linkedin", "zip_recruiter", "glassdoor", "google"],

        search_term="Devops",

        google_search_term="Remote Devops jobs near miami, florida",

        location="miami, fl",

        results_wanted=50,

        hours_old=72,  # (only Linkedin/Indeed is hour specific, others round up to days old)

        country_indeed='USA',  # only needed for indeed / glassdoor

    )

    print(f"scraper search completed. found {len(jobs)} jobs.")

    print(jobs.head())

    # Convert DataFrame to JSON string
    json_string = jobs.to_json(orient='records')

    print("json", json_string)


    # jobs.to_csv("shinob-jobs-report.csv", quoting=csv.QUOTE_NONNUMERIC, escapechar="\\", index=False, sep='|' )

    return json_string

def writeFile(variable_value):

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")

    filename = f"shinobi-jobs-report.json"

    with open(filename, "w") as f:

        f.write(str(variable_value))

def read():

    filename = "shinobi-jobs-report.json"

    try:

        with open(filename, "r") as f:

            return f.read()

    except FileNotFoundError:

        print(f"Error: File '{filename}' not found.")

        return None

#thing = dowork()

#print("thing: ",thing)

#writeFile(thing)
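The commented-out lines at the bottom of fetchjobs.py suggest the scraper can also be driven without the Flask app. A rough shell equivalent, assuming the dependencies from provision.bash / requirements.txt are installed and that this is run from the repository root, might look like:

```bash
#!/bin/bash
# One scrape-and-save cycle, mirroring the commented-out footer of fetchjobs.py.
python3 -c 'from fetchjobs import dowork, writeFile; writeFile(dowork())'

# The report is written next to the module as shinobi-jobs-report.json.
head -c 200 shinobi-jobs-report.json; echo
```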
37 provision.bash Executable file
@@ -0,0 +1,37 @@
#!/bin/bash

#reset

#clear
#set -e

#set -x

sudo apt update

sudo apt install -y python3-pip

sudo apt install -y python3-flask

##sudo apt install -y python3.12-venv

##python3 -m venv my_venv

##source my_venv/bin/activate

pip install -r requirements.txt

# sudo pip install jobspy --break-system-packages

## dev notes / we just install it twice; not worth getting cute about it.

pip install Flask-CORS

pip install -U python-jobspy

#pip install -U python-jobspy --break-system-packages ## worked on jacques's garuda box 12/19




##pip install flask
41 readme.md Normal file
@@ -0,0 +1,41 @@
# Shinobi Jobs Report

This Flask application fetches jobs from various job boards and returns them in JSON format.

<img src="cover.png">

**Endpoints**

* **/jobs/load** - This endpoint scrapes jobs from Indeed, LinkedIn, ZipRecruiter, Glassdoor, and Google for "Devops" positions in the Miami, Florida area and returns them as JSON.
* **/jobs/** - This endpoint returns the most recent jobs data that was fetched.

**How to Use**

1. Clone this repository.
2. Install the required libraries using `pip install -r requirements.txt`.
3. Run the application using `flask run`.
4. To fetch new jobs, make a GET request to `/jobs/load`.
5. To access the most recent jobs data, make a GET request to `/jobs/`.

**Example Usage**

```
curl http://localhost:5000/jobs/load
```

This will return a JSON object containing the scraped jobs data.

**Explanation of the Code**

* The `dowork` function scrapes jobs from the specified job boards using the `jobspy` library.
* The `writeFile` function writes the scraped jobs data to a JSON file.
* The `read` function reads the most recent jobs data from the JSON file.
* The `/jobs/load` endpoint calls the `dowork` function to scrape new jobs and then calls the `writeFile` function to save the data. It then returns the JSON data.
* The `/jobs/` endpoint calls the `read` function to retrieve the most recent jobs data from the JSON file and returns it.

**Additional Notes**

* This is a basic example of a Flask application. You can customize it to fit your specific needs.
* The `jobspy` library is not included in this repository. You will need to install it separately using `pip install python-jobspy`.
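Taken together, the readme's "How to Use" steps boil down to something like the sketch below, run from a clone of the repository. The background launch, the fixed sleep, and the jq call are illustrative additions, not part of the repo.

```bash
#!/bin/bash
# End-to-end local run following the readme: install, start the API, hit both endpoints.
# Assumes a Python 3 environment with pip, plus jq for inspecting the response.
pip install -r requirements.txt

flask run --host=0.0.0.0 &   # dev server on the default port 5000
sleep 3                      # crude wait for the server to come up

curl -s http://localhost:5000/jobs/load > /dev/null   # scrape (can take a while)
curl -s http://localhost:5000/jobs/ | jq 'length'     # how many jobs were saved
```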
8 requirements.txt Normal file
@@ -0,0 +1,8 @@
Flask==1.1.2
Jinja2==2.11.2
Flask-RESTful==0.3.8
MarkupSafe==1.1.1
itsdangerous==1.1.0
Werkzeug==1.0.1
Flask-CORS
python-jobspy
13 web/.dockerignore Executable file
@@ -0,0 +1,13 @@
.git

.pristine

.trash

.recycle

.backup

.template

.calendar
5 web/Dockerfile Executable file
@@ -0,0 +1,5 @@
FROM nginx

WORKDIR /usr/share/nginx/html/

COPY . .
13 web/compose.bash Executable file
@@ -0,0 +1,13 @@
#!/bin/bash

reset

clear

set -e

set -x

docker-compose down

docker-compose up --build
17 web/compose.yaml Normal file
@@ -0,0 +1,17 @@
services:

  valorant-digital-workspace-web:

    container_name: valorant-digital-workspace-web

    image: linuxlinape/lenape-jobs-dashboard

    build:

      context: .

      dockerfile: Dockerfile

    ports:

      - 8080:80
BIN web/favicon.ico Executable file
Binary file not shown. (Size: 1.1 KiB)
81 web/index.html Normal file
@@ -0,0 +1,81 @@
<!DOCTYPE html>
<html lang="en">
<head>

    <meta charset="UTF-8">

    <meta name="viewport" content="width=device-width, initial-scale=1.0">

    <title>Recent Jobs from Internet</title>

    <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.5.2/css/bootstrap.min.css">

    <link rel="stylesheet" href="https://bootswatch.com/4/journal/bootstrap.min.css">

</head>

<body>

    <nav class="navbar navbar-expand-lg bg-dark" data-bs-theme="dark">
        <div class="container-fluid">
            <a class="navbar-brand" href="#">Lenape Jobs</a>
            <button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarColor02" aria-controls="navbarColor02" aria-expanded="false" aria-label="Toggle navigation">
                <span class="navbar-toggler-icon"></span>
            </button>
            <div class="collapse navbar-collapse" id="navbarColor02">
                <ul class="navbar-nav me-auto">
                    <li class="nav-item">
                        <a class="nav-link active" href="#">Home
                            <span class="visually-hidden">(current)</span>
                        </a>
                    </li>

                    <li class="nav-item">
                        <a class="nav-link" href="#">Load New Jobs</a>
                    </li>

                    <li class="nav-item">
                        <a class="nav-link" href="#">Help</a>
                    </li>
                </ul>

            </div>
        </div>
    </nav>

    <div class="container mt-12">

        <h1>Recent Jobs from Internet</h1>

        <button class="btn btn-lg btn-primary" type="button" onclick="load()">Refresh Jobs (Pull From Internet)</button>

        <table id="leaderboard" class="table table-striped table-hover">
            <thead>
                <tr>
                    <th></th>
                    <th>source site</th>
                    <th>Company</th>
                    <th>Position</th>
                    <th>Location</th>
                    <th>job type</th>
                    <th>is remote</th>
                    <th>description</th>
                </tr>
            </thead>
            <tbody>
            </tbody>
        </table>
    </div>

    <script src="https://code.jquery.com/jquery-3.5.1.slim.min.js"></script>

    <script src="https://cdn.jsdelivr.net/npm/popper.js@1.16.1/dist/umd/popper.min.js"></script>

    <script src="https://stackpath.bootstrapcdn.com/bootstrap/4.5.2/js/bootstrap.min.js"></script>

    <script src="https://code.jquery.com/jquery-2.1.3.js"></script>

    <script src="js/jobs.js"></script>

</body>
</html>
120 web/js/jobs.js Normal file
@@ -0,0 +1,120 @@
$(document).ready(function () {

    fetch();

    setInterval(fetch, 1000 * 120);

});

function fetch() {

    console.debug(" -> :: fetchLeaderboardDetails()");

    $.ajax({

        type: "GET",

        url: "http://localhost:8888" + "/jobs/",

        dataType: 'json', // <-- Set dataType to 'json'

        //contentType: "text/plain",

        crossDomain: false,

        success: function (data, status, jqXHR) {

            console.log("good");

            profitleaderboard(data);

        },

        error: function (error, status) {

            console.log("error fetching leaderboard", error);

        }

    });

}

function load() {

    console.debug(" -> :: fetchLeaderboardDetails()");

    $.ajax({

        type: "GET",

        url: "http://localhost:8888" + "/jobs/load",

        dataType: 'json', // <-- Set dataType to 'json'

        //contentType: "text/plain",

        crossDomain: false,

        success: function (data, status, jqXHR) {

            console.log("good");

            profitleaderboard(data);

        },

        error: function (error, status) {

            console.log("error fetching leaderboard", error);

        }

    });

}

function profitleaderboard(jobList) {

    console.log("jobList / ", jobList);

    var html = '';

    var size = jobList.length;

    console.log("size / " + size);

    for (var i = 0; i < jobList.length; i++) {

        html += '<tr>';

        html += '<td class="METADATA DEBUG">' + (i + 1) + '</td>';              // row number
        html += '<td class="METADATA DEBUG">' + jobList[i].site + '</td>';      // source site
        html += '<td class="METADATA DEBUG">' + jobList[i].company + '</td>';   // company name

        html += '<td class="METADATA DEBUG"><a target="_job" href="' + jobList[i].job_url + '">' + jobList[i].title + '</a></td>'; // position title, linked to the posting

        // html += '<td class="METADATA DEBUG">' + + '</td>';

        html += '<td class="METADATA DEBUG">' + jobList[i].location + '</td>';  // location
        html += '<td class="METADATA DEBUG">' + jobList[i].job_type + '</td>';  // job type
        html += '<td class="METADATA DEBUG">' + jobList[i].is_remote + '</td>'; // remote flag

        html += '<td class="METADATA DEBUG">' + jobList[i].description + '</td>'; // description text

        html += '</tr>';

    }

    $('#leaderboard > tbody').html(html);

}