diff --git a/.dockerignore b/.dockerignore
index edf84398ea3705dd05e2aa2041050e80578fe8ca..1f48ee48fe8e40c97a21f3955e1cd22e413f928c 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,4 +1,4 @@
-.git
-Dockerfile
-*~
+.git
+Dockerfile
+*~
 README.md
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 8c9cf27f74b920afdcd69dcd7bf3838f62465807..b7486adac324d443c4683a166a79eb54fa55eaab 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,128 +1,128 @@
-# VSCode Settings
-.vscode
-
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-pip-wheel-metadata/
-share/python-wheels/
-*.egg-info/
-.installed.cfg
-*.egg
-MANIFEST
-
-# PyInstaller
-#  Usually these files are written by a python script from a template
-#  before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.nox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*.cover
-*.py,cover
-.hypothesis/
-.pytest_cache/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-db.sqlite3
-db.sqlite3-journal
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
-# Jupyter Notebook
-.ipynb_checkpoints
-
-# IPython
-profile_default/
-ipython_config.py
-
-# pyenv
-.python-version
-
-# pipenv
-#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
-#   However, in case of collaboration, if having platform-specific dependencies or dependencies
-#   having no cross-platform support, pipenv may install dependencies that don't work, or not
-#   install all needed dependencies.
-#Pipfile.lock
-
-# celery beat schedule file
-celerybeat-schedule
-
-# SageMath parsed files
-*.sage.py
-
-# Environments
-.env
-.venv
-env/
-venv/
-ENV/
-env.bak/
-venv.bak/
-
-# Spyder project settings
-.spyderproject
-.spyproject
-
-# Rope project settings
-.ropeproject
-
-# mkdocs documentation
-/site
-
-# mypy
-.mypy_cache/
-.dmypy.json
-dmypy.json
-
-# Pyre type checker
-.pyre/
+# VSCode Settings
+.vscode
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+#   However, in case of collaboration, if having platform-specific dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
diff --git a/Dockerfile b/Dockerfile
index 0081df8ed01395816b34aa0ce88a2e9859b6db78..a6fc1da165ceb18fd72ff7817aceb7d610fd41f9 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,28 +1,28 @@
-FROM python:3.8-slim-buster
-
-LABEL maintainer="Aiden Gilmartin" \
-    description="Speedtest to InfluxDB data bridge"
-
-# Install dependencies
-ENV DEBIAN_FRONTEND=noninteractive
-RUN apt-get update
-RUN apt-get -q -y install --no-install-recommends apt-utils gnupg1 apt-transport-https dirmngr
-
-# Install speedtest-cli
-RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 379CE192D401AB61
-RUN echo "deb https://ookla.bintray.com/debian buster main" | tee  /etc/apt/sources.list.d/speedtest.list
-RUN apt-get update && apt-get -q -y install speedtest
-
-# Install Python packages
-COPY requirements.txt /
-RUN pip install -r /requirements.txt
-
-# Clean up
-RUN apt-get -q -y autoremove
-RUN apt-get -q -y clean
-RUN rm -rf /var/lib/apt/lists/*
-
-# Final setup & execution
-COPY . /app
-WORKDIR /app
+FROM python:3.8-slim-buster
+
+LABEL maintainer="Josh Smith" \
+    description="Original by Aiden Gilmartin. Speedtest to InfluxDB data bridge"
+#wget "https://ookla.bintray.com/download/$(wget https://ookla.bintray.com/download/ -q -O - | grep x86_64-linux.tgz\" | grep -Po "(?<=href=\")[^^\"]*" | cut -d ":" -f 2)"
+# Install dependencies
+ENV DEBIAN_FRONTEND=noninteractive
+RUN apt-get update
+RUN apt-get -q -y install --no-install-recommends apt-utils gnupg1 apt-transport-https dirmngr
+
+# Install speedtest-cli
+RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 379CE192D401AB61
+RUN echo "deb https://ookla.bintray.com/debian buster main" | tee  /etc/apt/sources.list.d/speedtest.list
+RUN apt-get update && apt-get -q -y install speedtest
+
+# Install Python packages
+COPY requirements.txt /
+RUN pip install -r /requirements.txt
+
+# Clean up
+RUN apt-get -q -y autoremove
+RUN apt-get -q -y clean
+RUN rm -rf /var/lib/apt/lists/*
+
+# Final setup & execution
+COPY . /app
+WORKDIR /app
 CMD ["python3", "-u", "main.py"]
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
index 322ce1137fc47e802b5a4aeddaf9acfeba35a5eb..ccadc038ab986a2768fd38291398e82e6065a6a2 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,21 +1,21 @@
-MIT License
-
-Copyright (c) 2019 Aiden Gilmartin
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+MIT License
+
+Copyright (c) 2019 Aiden Gilmartin
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
index 8b3289e92592e1f8570352c096edddd546d49f53..a3fa86522bd80e24c3bf04293912d3d724502f08 100644
--- a/README.md
+++ b/README.md
@@ -1,47 +1,47 @@
-# Speedtest to InfluxDB
-
-This is a small Python script that will continuously run the Speedtest CLI application by Ookla, reformat the data output and forward it on to an InfluxDB database.
-
-You may want to do this so that you can track your internet connections consistency over time. Using Grafana you can view and explore this data easily.
-
-![Grafana Dashboard](https://i.imgur.com/8cUdMy7.png)
-
-## Using the script
-
-The InfluxDB connection settings are controlled by environment variables.
-
-The variables available are:
-- INFLUX_DB_ADDRESS = 192.168.1.xxx
-- INFLUX_DB_PORT = 8086
-- INFLUX_DB_USER = user
-- INFLUX_DB_PASSWORD = pass
-- INFLUX_DB_DATABASE = speedtest
-- SPEEDTEST_INTERVAL = 1800
-- SPEEDTEST_FAIL_INTERVAL = 300
-
-Be aware that this script will automatically accept the license and GDPR statement so that it can run non-interactively. Make sure you agree with them before running.
-
-### 1. No Container
-
-1. [Install the Speedtest CLI application by Ookla.](https://www.speedtest.net/apps/cli)
-
-    NOTE: The `speedtest-cli` package in distro repositories is an unofficial client. It will need to be uninstalled before installing the Ookla Speedtest CLI application with the directions on their website.
-
-2. Install the InfluxDB client for library from Python.
-
-    `pip install influxdb`
-
-3. Run the script.
-
-    `python3 ./main.py`
-
-### 2. Run with Docker or Podman
-
-1. Build the container.
-
-    `docker build -t aidengilmartin/speedtest-influx ./`
-
-2. Run the container.
-
-    `docker run -d --name speedtest-influx aidengilmartin/speedtest-influx`
-
+# Speedtest to InfluxDB
+
+This is a small Python script that continuously runs the Speedtest CLI application by Ookla, reformats the data output, and forwards it to an InfluxDB database.
+
+You may want to do this so that you can track your internet connection's consistency over time. Using Grafana, you can easily view and explore this data.
+
+![Grafana Dashboard](https://i.imgur.com/8cUdMy7.png)
+
+## Using the script
+
+The InfluxDB connection settings are controlled by environment variables.
+
+The variables available are (the run examples below show how to set them):
+- INFLUX_DB_ADDRESS = 192.168.1.xxx
+- INFLUX_DB_PORT = 8086
+- INFLUX_DB_USER = user
+- INFLUX_DB_PASSWORD = pass
+- INFLUX_DB_DATABASE = speedtest
+- SPEEDTEST_INTERVAL = 1800
+- SPEEDTEST_FAIL_INTERVAL = 300
+
+Be aware that this script will automatically accept the license and GDPR statement so that it can run non-interactively. Make sure you agree with them before running.
+
+### 1. No Container
+
+1. [Install the Speedtest CLI application by Ookla.](https://www.speedtest.net/apps/cli)
+
+    NOTE: The `speedtest-cli` package in distro repositories is an unofficial client. It needs to be uninstalled before installing the official Ookla Speedtest CLI application, following the directions on their website.
+
+2. Install the InfluxDB client library for Python.
+
+    `pip install influxdb`
+
+3. Run the script.
+
+    `python3 ./main.py`
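+
+    The script reads its settings from the environment, so you can also supply them inline, for example (placeholder values taken from the list above):
+
+    `INFLUX_DB_ADDRESS=192.168.1.xxx INFLUX_DB_PORT=8086 INFLUX_DB_USER=user INFLUX_DB_PASSWORD=pass INFLUX_DB_DATABASE=speedtest SPEEDTEST_INTERVAL=1800 SPEEDTEST_FAIL_INTERVAL=300 python3 ./main.py`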
+
+### 2. Run with Docker or Podman
+
+1. Build the container.
+
+    `docker build -t aidengilmartin/speedtest-influx ./`
+
+2. Run the container.
+
+    `docker run -d --name speedtest-influx aidengilmartin/speedtest-influx`
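+
+    The InfluxDB connection settings are passed to the container with `-e` flags, for example (placeholder values taken from the list above):
+
+    `docker run -d --name speedtest-influx -e INFLUX_DB_ADDRESS=192.168.1.xxx -e INFLUX_DB_PORT=8086 -e INFLUX_DB_USER=user -e INFLUX_DB_PASSWORD=pass -e INFLUX_DB_DATABASE=speedtest -e SPEEDTEST_INTERVAL=1800 -e SPEEDTEST_FAIL_INTERVAL=300 aidengilmartin/speedtest-influx`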
+
diff --git a/main.py b/main.py
index eee8a77e3e62a726b06f98d579a7e4f912fb5e8e..9a2e148642c0ef26d794fdfebf4c582c327915ce 100755
--- a/main.py
+++ b/main.py
@@ -1,103 +1,102 @@
-import time
-import json
-import subprocess
-import os
-
-from influxdb import InfluxDBClient
-
-# InfluxDB Settings
-DB_ADDRESS = os.environ.get('INFLUX_DB_ADDRESS')
-DB_PORT = int(os.environ.get('INFLUX_DB_PORT'))
-DB_USER = os.environ.get('INFLUX_DB_USER')
-DB_PASSWORD = os.environ.get('INFLUX_DB_PASSWORD')
-DB_DATABASE = os.environ.get('INFLUX_DB_DATABASE')
-
-# Speedtest Settings
-TEST_INTERVAL = int(os.environ.get('SPEEDTEST_INTERVAL'))  # Time between tests (in seconds).
-TEST_FAIL_INTERVAL = int(os.environ.get('SPEEDTEST_FAIL_INTERVAL'))  # Time before retrying a failed Speedtest (in seconds).
-
-influxdb_client = InfluxDBClient(
-    DB_ADDRESS, DB_PORT, DB_USER, DB_PASSWORD, None)
-
-
-def init_db():
-    databases = influxdb_client.get_list_database()
-
-    if len(list(filter(lambda x: x['name'] == DB_DATABASE, databases))) == 0:
-        influxdb_client.create_database(
-            DB_DATABASE)  # Create if does not exist.
-    else:
-        influxdb_client.switch_database(DB_DATABASE)  # Switch to if does exist.
-def pkt_loss(data):
-    if data['packetLoss']:
-        return data['packetLoss']
-    else: 
-        return 0
-
-def format_for_influx(cliout):
-    data = json.loads(cliout)
-    # There is additional data in the speedtest-cli output but it is likely not necessary to store.
-    influx_data = [
-        {
-            'measurement': 'ping',
-            'time': data['timestamp'],
-            'fields': {
-                'jitter': data['ping']['jitter'],
-                'latency': data['ping']['latency']
-            }
-        },
-        {
-            'measurement': 'download',
-            'time': data['timestamp'],
-            'fields': {
-                # Byte to Megabit
-                'bandwidth': data['download']['bandwidth'] / 125000,
-                'bytes': data['download']['bytes'],
-                'elapsed': data['download']['elapsed']
-            }
-        },
-        {
-            'measurement': 'upload',
-            'time': data['timestamp'],
-            'fields': {
-                # Byte to Megabit
-                'bandwidth': data['upload']['bandwidth'] / 125000,
-                'bytes': data['upload']['bytes'],
-                'elapsed': data['upload']['elapsed']
-            }
-        },
-        {
-            'measurement': 'packetLoss',
-            'time': data['timestamp'],
-            'fields': {
-                'packetLoss': pkt_loss(data)
-            }
-        }
-    ]
-
-    return influx_data
-
-
-def main():
-    init_db()  # Setup the database if it does not already exist.
-
-    while (1):  # Run a Speedtest and send the results to influxDB indefinitely.
-        speedtest = subprocess.run(
-            ["speedtest", "--accept-license", "--accept-gdpr", "-f", "json"], capture_output=True)
-
-        if speedtest.returncode == 0:  # Speedtest was successful.
-            data = format_for_influx(speedtest.stdout)
-            print("Speedtest Successful:")
-            if influxdb_client.write_points(data) == True:
-                print("Data written to DB successfully")
-                time.sleep(TEST_INTERVAL)
-        else:  # Speedtest failed.
-            print("Speedtest Failed:")
-            print(speedtest.stderr)
-            print(speedtest.stdout)
-            time.sleep(TEST_FAIL_INTERVAL)
-
-
-if __name__ == '__main__':
-    print('Speedtest CLI Data Logger to InfluxDB')
+import time
+import json
+import subprocess
+import os
+
+from influxdb import InfluxDBClient
+
+# InfluxDB Settings
+DB_ADDRESS = os.environ.get('INFLUX_DB_ADDRESS')
+DB_PORT = int(os.environ.get('INFLUX_DB_PORT'))
+DB_USER = os.environ.get('INFLUX_DB_USER')
+DB_PASSWORD = os.environ.get('INFLUX_DB_PASSWORD')
+DB_DATABASE = os.environ.get('INFLUX_DB_DATABASE')
+
+# Speedtest Settings
+TEST_INTERVAL = int(os.environ.get('SPEEDTEST_INTERVAL'))  # Time between tests (in seconds).
+TEST_FAIL_INTERVAL = int(os.environ.get('SPEEDTEST_FAIL_INTERVAL'))  # Time before retrying a failed Speedtest (in seconds).
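+# All of the settings above are required: int() raises a TypeError if an interval variable is unset.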
+
+influxdb_client = InfluxDBClient(
+    DB_ADDRESS, DB_PORT, DB_USER, DB_PASSWORD, None)
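+# The final positional argument (the database) is left as None; init_db() below creates or selects DB_DATABASE.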
+
+
+def init_db():
+    databases = influxdb_client.get_list_database()
+
+    if len(list(filter(lambda x: x['name'] == DB_DATABASE, databases))) == 0:
+        influxdb_client.create_database(
+            DB_DATABASE)  # Create if does not exist.
+    else:
+        influxdb_client.switch_database(DB_DATABASE)  # Switch to it if it does exist.
+
+
+def pkt_loss(data):
+    # The Speedtest CLI omits the 'packetLoss' key when packet loss cannot be measured.
+    if 'packetLoss' in data:
+        return data['packetLoss']
+    else:
+        return 0
+
+
+def format_for_influx(cliout):
+    data = json.loads(cliout)
+    # There is additional data in the speedtest-cli output but it is likely not necessary to store.
+    influx_data = [
+        {
+            'measurement': 'ping',
+            'time': data['timestamp'],
+            'fields': {
+                'jitter': data['ping']['jitter'],
+                'latency': data['ping']['latency']
+            }
+        },
+        {
+            'measurement': 'download',
+            'time': data['timestamp'],
+            'fields': {
+                # Bytes/s to Mbit/s (1 Mbit/s = 125000 bytes/s)
+                'bandwidth': data['download']['bandwidth'] / 125000,
+                'bytes': data['download']['bytes'],
+                'elapsed': data['download']['elapsed']
+            }
+        },
+        {
+            'measurement': 'upload',
+            'time': data['timestamp'],
+            'fields': {
+                # Bytes/s to Mbit/s (1 Mbit/s = 125000 bytes/s)
+                'bandwidth': data['upload']['bandwidth'] / 125000,
+                'bytes': data['upload']['bytes'],
+                'elapsed': data['upload']['elapsed']
+            }
+        },
+        {
+            'measurement': 'packetLoss',
+            'time': data['timestamp'],
+            'fields': {
+                'packetLoss': pkt_loss(data)
+            }
+        }
+    ]
+
+    return influx_data
+
+
+def main():
+    init_db()  # Set up the database if it does not already exist.
+
+    while True:  # Run a Speedtest and send the results to InfluxDB indefinitely.
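+        # --accept-license / --accept-gdpr suppress the Speedtest CLI's interactive prompts;
+        # -f json makes it print the results as a single JSON document on stdout.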
+        speedtest = subprocess.run(
+            ["speedtest", "--accept-license", "--accept-gdpr", "-f", "json"], capture_output=True)
+
+        if speedtest.returncode == 0:  # Speedtest was successful.
+            data = format_for_influx(speedtest.stdout)
+            print("Speedtest Successful:")
+            if influxdb_client.write_points(data):
+                print("Data written to DB successfully")
+                time.sleep(TEST_INTERVAL)
+        else:  # Speedtest failed.
+            print("Speedtest Failed:")
+            print(speedtest.stderr)
+            print(speedtest.stdout)
+            time.sleep(TEST_FAIL_INTERVAL)
+
+
+if __name__ == '__main__':
+    print('Speedtest CLI Data Logger to InfluxDB')
     main()
\ No newline at end of file