@oliveratgithub
Last active May 20, 2022 13:53

Revisions

  1. oliveratgithub revised this gist Feb 21, 2022. 1 changed file with 7 additions and 4 deletions.
    11 changes: 7 additions & 4 deletions saunastatsgrabber.py
    @@ -3,17 +3,18 @@
      import os
      import datetime
      from bs4 import BeautifulSoup

      webpage='https://www.seebadenge.ch/wp/sauna'
    - file_out = '/path/to/saunastats.txt'
    + file_out = '/home/inex/SCRIPTS/saunastats/saunastats.txt'
      data_container = 'ase_crowdmonitor'
      repeat=300

      def getFreiePlaetze():
          now = datetime.datetime.now()

          # Open file to append
    -     sys.stdout = open(file_out, "a")
    +     sys.stdout = fs = open(file_out, "a")

          # Fetch using curl
          html_doc = os.popen('curl -sSL '+webpage).read()
          if 'curl: (' in html_doc:
    @@ -31,9 +32,11 @@ def getFreiePlaetze():
              finally:
                  pass

    +     fs.close()

      import threading
      import time
      import schedule
      while True:
          getFreiePlaetze()
          time.sleep(repeat)
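
    Note: this final revision keeps the file handle around as `fs` so it can be closed explicitly at the end of the function. Purely as an illustration (not part of the gist), the same append step could be written with a `with` block and `print(..., file=...)`, which avoids redirecting `sys.stdout` and closes the handle automatically:

    ```python
    import datetime

    file_out = '/path/to/saunastats.txt'  # placeholder path, as used in the gist

    def append_stat(value):
        """Append one line to the stats file; the handle is closed automatically."""
        with open(file_out, "a") as fs:   # no sys.stdout redirection, no explicit fs.close()
            print(value, file=fs)

    # hypothetical usage, mirroring the gist's success and error cases:
    # append_stat(fly.contents[0])
    # append_stat(str(datetime.datetime.now()) + ': ase_crowdmonitor not found (website down?)')
    ```

    A context manager closes the file even if the write itself raises, so no explicit `fs.close()` is needed. The `append_stat` name is hypothetical; only the path and the output values come from the gist.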
  2. oliveratgithub revised this gist Feb 21, 2022. 1 changed file with 3 additions and 3 deletions.
    6 changes: 3 additions & 3 deletions saunastatsgrabber.py
    @@ -11,16 +11,16 @@
      def getFreiePlaetze():
          now = datetime.datetime.now()

    +     # Open file to append
    +     sys.stdout = open(file_out, "a")
    +
          # Fetch using curl
          html_doc = os.popen('curl -sSL '+webpage).read()
          if 'curl: (' in html_doc:
              # On error
              print(str(now)+html_doc)

          else:
    -         # Open file to append
    -         sys.stdout = open(file_out, "a")
    -
              # Parse
              try:
                  soup = BeautifulSoup(html_doc, 'html.parser')
  3. oliveratgithub revised this gist Feb 21, 2022. 1 changed file with 6 additions and 9 deletions.
    15 changes: 6 additions & 9 deletions saunastatsgrabber.py
    @@ -1,15 +1,14 @@
      #!/usr/bin/env python3
      import sys

    + import os
    + import datetime
    + from bs4 import BeautifulSoup
    + webpage='https://www.seebadenge.ch/wp/sauna'
    + file_out = '/path/to/saunastats.txt'
    + data_container = 'ase_crowdmonitor'
      repeat=300
    -
      def getFreiePlaetze():
    -     import os
    -     import datetime
    -     from bs4 import BeautifulSoup
    -     webpage='https://www.seebadenge.ch/wp/sauna'
    -     data_container = 'ase_crowdmonitor'
    -     file_out = '/path/to/saunastats.txt'
          now = datetime.datetime.now()

          # Fetch using curl
    @@ -28,8 +27,6 @@ def getFreiePlaetze():
                  fly = soup.find('div', class_='ase_crowdmonitor')
                  print(fly.contents[0])
              except:
    -             import datetime
    -             now = datetime.datetime.now()
                  print(str(now)+': ase_crowdmonitor not found (website down?)')
              finally:
                  pass
  4. oliveratgithub revised this gist Feb 21, 2022. 1 changed file with 29 additions and 14 deletions.
    43 changes: 29 additions & 14 deletions saunastatsgrabber.py
    @@ -4,24 +4,39 @@
      repeat=300

      def getFreiePlaetze():
    -     import os
    -     from bs4 import BeautifulSoup
    -     webpage='https://www.seebadenge.ch/wp/sauna'
    -     data_container = 'ase_crowdmonitor'
    -     file_out = '/path/to/saunastats.txt'
    +     import os
    +     import datetime
    +     from bs4 import BeautifulSoup
    +     webpage='https://www.seebadenge.ch/wp/sauna'
    +     data_container = 'ase_crowdmonitor'
    +     file_out = '/path/to/saunastats.txt'
    +     now = datetime.datetime.now()

    -     # Fetch and parse
    -     html_doc = os.popen('curl -sSL '+webpage).read()
    -     soup = BeautifulSoup(html_doc, 'html.parser')
    -     fly = soup.find('div', class_='ase_crowdmonitor')
    +     # Fetch using curl
    +     html_doc = os.popen('curl -sSL '+webpage).read()
    +     if 'curl: (' in html_doc:
    +         # On error
    +         print(str(now)+html_doc)

    -     # Write to file
    -     sys.stdout = open(file_out, "a")
    -     print(fly.contents[0])
    +     else:
    +         # Open file to append
    +         sys.stdout = open(file_out, "a")
    +
    +         # Parse
    +         try:
    +             soup = BeautifulSoup(html_doc, 'html.parser')
    +             fly = soup.find('div', class_='ase_crowdmonitor')
    +             print(fly.contents[0])
    +         except:
    +             import datetime
    +             now = datetime.datetime.now()
    +             print(str(now)+': ase_crowdmonitor not found (website down?)')
    +         finally:
    +             pass

      import threading
      import time
      import schedule
      while True:
          getFreiePlaetze()
          time.sleep(repeat)
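
    This revision introduces the error handling: a failed `curl` call is detected by looking for `curl: (` in the output, and the parse is wrapped in `try/except` in case the `ase_crowdmonitor` element is missing. For comparison only — the gist itself shells out to `curl` via `os.popen()` — here is a rough sketch of the same fetch-and-parse using `urllib.request` from the standard library:

    ```python
    # Sketch only, not the gist's code. The URL and CSS class are taken from the gist;
    # the function name and timeout are assumptions.
    from urllib.request import urlopen
    from urllib.error import URLError

    from bs4 import BeautifulSoup

    webpage = 'https://www.seebadenge.ch/wp/sauna'

    def fetch_freie_plaetze():
        """Return the crowd-monitor text, or None if the fetch or parse fails."""
        try:
            html_doc = urlopen(webpage, timeout=30).read()
        except URLError as err:               # network error, DNS failure, timeout, HTTP error
            print('fetch failed:', err)
            return None
        soup = BeautifulSoup(html_doc, 'html.parser')
        fly = soup.find('div', class_='ase_crowdmonitor')
        if fly is None:                       # element missing: site changed or down
            print('ase_crowdmonitor not found (website down?)')
            return None
        return fly.contents[0]
    ```

    Here the `None` checks play the role of the gist's `except:` branch; the rest of the logic is unchanged.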
  5. oliveratgithub created this gist Feb 21, 2022.
    21 changes: 21 additions & 0 deletions howto.md
    @@ -0,0 +1,21 @@
    # Sauna Stats Grabber
    [Sauna - Seebad Enge](https://www.seebadenge.ch/wp/sauna)

    ## Pre-requisites
    ### Python version
    python3

    ### Python dependencies
    `sudo apt-get install python3-bs4`
    or…
    `pip3 install beautifulsoup4`

    ## Usage
    ### Run stand-alone
    `python3 /path/to/saunastatsgrabber.py`

    ### Run as background process
    `nohup python3 /path/to/saunastatsgrabber.py > /path/to/saunastatsgrabber.log 2>&1 &`

    ### Check running background processes
    `sudo ps -ef | grep python`
    27 changes: 27 additions & 0 deletions saunastatsgrabber.py
    @@ -0,0 +1,27 @@
    #!/usr/bin/env python3
    import sys

    repeat=300

    def getFreiePlaetze():
        import os
        from bs4 import BeautifulSoup
        webpage='https://www.seebadenge.ch/wp/sauna'
        data_container = 'ase_crowdmonitor'
        file_out = '/path/to/saunastats.txt'

        # Fetch and parse
        html_doc = os.popen('curl -sSL '+webpage).read()
        soup = BeautifulSoup(html_doc, 'html.parser')
        fly = soup.find('div', class_='ase_crowdmonitor')

        # Write to file
        sys.stdout = open(file_out, "a")
        print(fly.contents[0])

    import threading
    import time
    import schedule
    while True:
        getFreiePlaetze()
        time.sleep(repeat)
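
    One quirk of this initial version (kept through all later revisions): `threading` and `schedule` are imported but never used — the polling is a plain `while True` loop with `time.sleep(repeat)`. If the third-party `schedule` package (`pip3 install schedule`) were actually used, the same 300-second polling could be expressed roughly like this sketch, which assumes `getFreiePlaetze` is defined as above:

    ```python
    # Sketch only: the gist imports `schedule` but drives the loop with time.sleep().
    import time
    import schedule   # third-party: pip3 install schedule

    # run the job once every `repeat` seconds (300 in the gist)
    schedule.every(300).seconds.do(getFreiePlaetze)

    while True:
        schedule.run_pending()
        time.sleep(1)
    ```

    For a single job the plain sleep loop is just as good; `schedule` mainly pays off once several jobs with different intervals share one process.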