Show source code

The following files exist in this folder. Click to view.

webbutv3/

exercices/
exercises.php
header.php
incl/
index.php
ovningar/
project
projects.php
projekt/
reflections.php
source.php
stresstest.php
style.css
test
test.py
upgifter/
viewsource.php
word-app/

test.py

# Standard imports
from os import remove, path
from zipfile import ZipFile, ZIP_DEFLATED
from datetime import datetime, timezone
import logging
import json
# Extra imports
import maxminddb
from nfstream import NFStreamer
# Local files
import static_files.config as conf


def convert_timestamp(timestamp):
    """
    Takes a 13-digit UNIX timestamp (milliseconds) and converts it to a
    readable string format in local time.
    :param timestamp: UNIX timestamp (Int).
    :return: Timestamp (Str).
    """
    digits = len(str(timestamp))
    # Millisecond timestamps have 13 digits; divide by 1000 to get seconds
    if digits > 10:
        timestamp = timestamp / 1000
    utc_time = datetime.fromtimestamp(timestamp, timezone.utc)
    local_time = utc_time.astimezone()
    return local_time.strftime("%Y-%m-%d %H:%M:%S.%f")
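
# Example (illustrative value): convert_timestamp(1609459200000) returns
# "2021-01-01 00:00:00.000000" on a host whose local timezone is UTC;
# the exact output depends on the host's timezone setting.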


def check_files():
    """
    Checks the file size. If too big, the file gets rotated and zipped.
    """
    csv_file = f"{conf.folder_path}/output/{conf.csv_file}.csv"
    try:
        if path.getsize(csv_file) >= conf.max_file_size:
            conf.numb_zip_files += 1
            # If the zip file exists append to it, else create it
            mode = "w"
            if path.exists(f"{conf.folder_path}/output/zipped_files/"
                           f"{conf.zip_file_name}"):
                mode = "a"
            with ZipFile(f"{conf.folder_path}/output/zipped_files/"
                         f"{conf.zip_file_name}", mode, ZIP_DEFLATED) as zipper:
                zipper.write(csv_file, f"flow_{conf.numb_zip_files}.csv")
            logging.info(f"Rotated and zipped current CSV file. "
                         f"Zipped filename: flow_{conf.numb_zip_files}.csv")
            remove(csv_file)
    except Exception as e:
        logging.error(f"Failed to rotate and zip file. Error: {e}")
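
# Note: conf.max_file_size is compared against path.getsize(), which
# returns bytes, so a 10 MB threshold would (presumably) be stored in
# config.py as 10000000.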


def get_country(ip):
    """
    Gets the country based on an IP address.
    :param ip: IP address (Str).
    :return: Name of country (Str) and whether the IP is an anonymous
             proxy (Boolean), as one comma-separated string.
    """
    try:
        # Initialize connection to the database
        reader = maxminddb.open_database(f"{conf.folder_path}/static_files"
                                         f"/GeoLite2-Country.mmdb")
        # Get country from IP
        results = reader.get(str(ip))
        reader.close()
        if results is None:
            return f"Local IP:{ip}, False"
        if "country" in results:
            country_data = results["country"]  # Standard country dict
        elif "registered_country" in results:
            country_data = results["registered_country"]  # Anonymous proxy
        elif "continent" in results:
            country_data = results["continent"]  # Fallback if the country
            # is not found
        else:
            logging.error(f"No key was found in the data: {results}. IP: {ip}.")
            return "Unknown (Error), False"
        country = country_data["names"]["en"]
        is_proxy = False
        if "traits" in results:
            if "is_anonymous_proxy" in results["traits"]:
                is_proxy = results["traits"]["is_anonymous_proxy"]
    except Exception as e:
        logging.error(f"Error when getting country from IP. "
                      f"IP: {ip}\nError: {e}")
        return f"Error: {ip}, False"
    return f"{country}, {is_proxy}"
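
# Example return values (illustrative): "Sweden, False" for a public IP,
# "Local IP:10.0.0.5, False" for an address missing from the database,
# and "Error: <ip>, False" if the lookup raises an exception.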


def write_global_stats(data, file_name):
    """
    Loads the global data file, updates the counters and writes it back.
    :param data: Data to be added (Dict).
    :param file_name: Filename (Str).
    :return: None.
    """
    # Check if the file needs to be created
    if path.isfile(file_name):
        with open(file_name, "r+") as f:
            data_from_file = json.load(f)
            data_from_file["Flows"] += data["Flows"]
            data_from_file["Failed_flows"] += data["Failed_flows"]
            data_from_file["Megabytes"] += data["Megabytes"]
            data_from_file["Packets"] += data["Packets"]
            # Reset the file pointer to the beginning
            f.seek(0)
            json.dump(data_from_file, f, indent=4)
            # Remove any leftover content after the newly written data
            # (i.e. if the new JSON is shorter than the old)
            f.truncate()
    else:
        with open(file_name, "x") as f:
            json.dump({"Flows": data["Flows"],
                       "Failed_flows": data["Failed_flows"],
                       "Megabytes": data["Megabytes"],
                       "Packets": data["Packets"]}, f, indent=4)


def write_to_flows_csv(data, file_name):
    """
    Writes flow data to the CSV file, creating it with a header row
    first if it does not exist.
    :param data: Data to be appended (Str).
    :param file_name: Filename (Str).
    :return: None.
    """
    if not path.isfile(file_name):
        # If no CSV file exists, write the header line first
        with open(file_name, "w+") as f:
            f.write("App_name, Timestamp, Category, Src_Country, Src_is_proxy,"
                    " Dst_Country, Dst_is_proxy, Port, "
                    "Bidirectional_duration_ms, If guessed, If HTTP, "
                    "content-type \n")
    with open(file_name, "a+") as f:
        f.write(data)
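
# The header row is written only once; every later call appends
# pre-formatted rows such as (illustrative values):
# "TLS, 2021-01-01 12:00:00.000000, Web, Sweden, False, Sweden, False, ..."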


def capture_traffic(network_flows):
    """
    Captures data from the interface through NFStreamer and collects
    flow data.
    """
    csv_data = ""
    flows_stored = 0
    global_stats = {"Flows": 0,
                    "Failed_flows": 0,
                    "Megabytes": 0,
                    "Packets": 0}
    logging.info("Acquired NFStream object. Starting flow data collection.")
    for flow in network_flows:
        try:
            # Skip all flows coming from the SSH jumphost
            if "SSH" in flow.application_name and conf.proxyhost in flow.src_ip:
                continue
            # Add one to the total flow count
            flows_stored += 1
            # Classify the flow as failed if it holds no more packets
            # than a TCP handshake + 1 (i.e. <= 4)
            if flow.bidirectional_packets <= 4:
                global_stats["Failed_flows"] += 1
                continue
            # Update global statistics
            global_stats["Flows"] += 1
            global_stats["Packets"] += flow.bidirectional_packets
            global_stats["Megabytes"] += flow.bidirectional_bytes / 1000000
            # Append data from the current flow
            csv_data += f"{flow.application_name}, "
            csv_data += f"{convert_timestamp(flow.bidirectional_first_seen_ms)}, "
            csv_data += f"{flow.application_category_name}, "
            csv_data += f"{get_country(flow.src_ip)}, "
            csv_data += f"{get_country(flow.dst_ip)}, "
            csv_data += f"{flow.dst_port}, "
            csv_data += f"{flow.bidirectional_duration_ms}, "
            if flow.application_is_guessed == 1:
                csv_data += "True, "
            else:
                csv_data += "False, "
            # Add extra columns if the flow carries HTTP content
            if "HTTP" in flow.application_name:
                csv_data += f"{flow.user_agent}, "
                if flow.content_type:
                    csv_data += f"{flow.content_type}\n"
                else:
                    csv_data += "No content-type\n"
            else:
                csv_data += "Not HTTP\n"
            if flows_stored >= 100:
                # Write to file and reset the counters
                write_to_flows_csv(csv_data, f"{conf.folder_path}/output/"
                                             f"{conf.csv_file}.csv")
                write_global_stats(global_stats, f"{conf.folder_path}/output/"
                                                 f"{conf.global_file}.json")
                csv_data = ""
                global_stats = {"Flows": 0,
                                "Failed_flows": 0,
                                "Megabytes": 0,
                                "Packets": 0}
                flows_stored = 0
                # Check if file rotation is needed
                check_files()
                conf.written_flows += 100
                if conf.written_flows >= 5000:
                    logging.info("Parser has successfully stored 5000 flows.")
                    conf.written_flows = 0
        except Exception as e:
            logging.error(f"Unable to save flow data. Current flow: {flow}"
                          f"\nError: {e}")
            global_stats["Failed_flows"] += 1
            continue
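
# Flow rows are buffered and flushed to disk in batches of 100 to limit
# file I/O; the global statistics are merged into the JSON file at the
# same cadence, before the rotation check runs.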


def get_streamer():
    """
    Gets an NFStreamer object. Exits the program if it is unable to
    acquire the object.
    :return: NFStreamer object (Class Obj).
    """
    attempts = 0
    while attempts < 3:
        try:
            network_flow = NFStreamer(source=conf.interface_name,
                                      active_timeout=3600)
            return network_flow
        except Exception as e:
            attempts += 1
            logging.error(f"Failed to get NFStream object on attempt nr "
                          f"{attempts}. Error: {e}")
    logging.critical("Failed to establish NFStream object in 3 attempts. "
                     "Exiting program.")
    exit(0)


if __name__ == '__main__':
    # Initialize the log file
    logging.basicConfig(filename=f'{conf.folder_path}/main.log',
                        format='%(levelname)s:%(asctime)s: %(message)s',
                        level=logging.DEBUG)
    logging.info("Starting program run...")
    # Start NFStreamer
    nfstream_obj = get_streamer()
    logging.info("NFStreamer object acquired.")
    capture_traffic(nfstream_obj)
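
The script imports static_files.config as conf, but that file is not shown
above. As a rough sketch, assuming only the attribute names that test.py
actually uses, a minimal config.py might look like this (every value is a
placeholder, not the real configuration):

# static_files/config.py -- minimal sketch; all values below are
# placeholder assumptions, not the actual deployment settings.
folder_path = "/opt/flow-parser"      # Base directory of the project
interface_name = "eth0"               # Interface NFStreamer listens on
proxyhost = "10.0.0.1"                # SSH jumphost whose flows are skipped
csv_file = "flows"                    # CSV output name (without .csv)
global_file = "global_stats"          # JSON output name (without .json)
zip_file_name = "rotated_flows.zip"   # Archive for rotated CSV files
max_file_size = 10000000              # Rotation threshold in bytes (10 MB)
numb_zip_files = 0                    # Counter for rotated CSV files
written_flows = 0                     # Counter for flows written in batches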