Commit 75f05e03 authored by Tilmann Sager's avatar Tilmann Sager
Browse files

Refactored complete project // detection not working

parent 49ade68f
......@@ -69,7 +69,7 @@ def _is_faulty(path: []):
is_faulty = False
with open(path, 'r') as f:
if not re.match("^[0-9]+$", deque(f, 1)[0].split(',')[0]):
if not re.match("^[0-9]+$", deque(f, 1)[0].split_blocks(',')[0]):
is_faulty = True
return is_faulty
......
......@@ -5,7 +5,7 @@ import numpy as np
import pandas as pd
from constants import Columns as col
from util import fileio
import fileio
DTYPES = {
'FLIGHT_ID': np.int,
......@@ -74,7 +74,7 @@ def _filter_by_time_and_position(day_segment_df: pd.DataFrame, south: float, nor
return computed
def run(granule: {}, params: {}):
def filter(granule: {}, params: {}):
file_processed = _processed_file(granule.get(col.start), granule.get(col.end), granule.get(col.h),
granule.get(col.v))
path_processed = fileio.create_path([params.get('flight_proc_dir'), file_processed])
......
......@@ -11,7 +11,7 @@ from osgeo import gdal
from constants import Columns as col
def run(filepath: str) -> {}:
def extract(filepath: str) -> {}:
hdf_ds = gdal.Open(filepath)
metadata = hdf_ds.GetMetadata_Dict()
datetime_format = "%Y-%m-%dT%H:%M:%S.%fZ"
......
......@@ -35,7 +35,7 @@ def _check_last_line_in_csv(filepath_list: []):
for filepath in filepath_list:
with open(filepath, 'r') as f:
if not re.match("^[0-9]+$", deque(f, 1)[0].split(',')[0]):
if not re.match("^[0-9]+$", deque(f, 1)[0].split_blocks(',')[0]):
files_to_repair.append(filepath)
return files_to_repair
......
......@@ -4,8 +4,9 @@ config
from configparser import ConfigParser
from datetime import date, datetime
from os import cpu_count
from util import fileio
from common.path import check_path
def _today():
......@@ -26,19 +27,25 @@ _straight_key = 'straight'
_plot_key = 'plot'
def init():
def init_params():
cp = ConfigParser()
cp.read(fileio.check_path(['./config.ini'], create=False))
cp.read(check_path(['./config.ini'], create=False))
if bool(int(cp.get(_general_key, 'multiprocessing'))):
num_processes = cpu_count() - 2
else:
num_processes = 1
return {
# General
'multiprocessing': bool(int(cp.get(_general_key, 'multiprocessing'))),
'num_processed': num_processes,
'limit': int(cp.get(_general_key, 'limit')),
# Path
'hdf_dir': fileio.check_path([cp.get(_path_key, 'hdf'), cp.get(_product_key, 'name')], create=False),
'flight_raw_dir': fileio.check_path([cp.get(_path_key, 'flights'), 'raw'], create=False),
'flight_proc_dir': fileio.check_path([cp.get(_path_key, 'flights'), 'processed'], create=True),
'output_dir': fileio.check_path([cp.get(_path_key, 'output'), _now()], create=True),
'hdf_dir': check_path([cp.get(_path_key, 'hdf'), cp.get(_product_key, 'name')], create=False),
'flight_raw_dir': check_path([cp.get(_path_key, 'flights'), 'raw'], create=False),
'flight_proc_dir': check_path([cp.get(_path_key, 'flights'), 'processed'], create=True),
'output_dir': check_path([cp.get(_path_key, 'output'), _now()], create=True),
# Bands
'bands': [int(band.strip()) for band in cp.get(_product_key, 'bands').split(',')],
......@@ -48,20 +55,20 @@ def init():
'band_suffix': cp.get(_product_key, 'band_suffix'),
# Preprocessing
'norm_interval': int(cp.get(_pp_key, 'norm_interval')),
'kernel_size': int(cp.get(_pp_key, 'gauss_kernel_size')),
'k': float(cp.get(_pp_key, 'k')),
'order': int(cp.get(_pp_key, 'order')),
'pp_sigma': float(cp.get(_pp_key, 'sigma')),
'cda_norm_interval': int(cp.get(_pp_key, 'norm_interval')),
'cda_kernel_size': int(cp.get(_pp_key, 'gauss_kernel_size')),
'cda_k': float(cp.get(_pp_key, 'k')),
'cda_order': int(cp.get(_pp_key, 'order')),
'cda_sigma': float(cp.get(_pp_key, 'sigma')),
# Detection
'method': cp.get(_detect_key, 'method'), # "straight" or "probabilistic"
'max_px_size': int(cp.get(_detect_key, 'min_px_size')),
'connectivity': int(cp.get(_detect_key, 'connectivity')),
'split_by': int(cp.get(_detect_key, 'split_by')),
'threshold': int(cp.get(_detect_key, 'threshold')),
'postprocess': bool(int(cp.get(_detect_key, 'postprocess'))),
'ratio_threshold': int(cp.get(_detect_key, 'ratio_threshold')),
'detect_method': cp.get(_detect_key, 'method'), # "straight" or "probabilistic"
'detect_max_px_size': int(cp.get(_detect_key, 'min_px_size')),
'detect_connectivity': int(cp.get(_detect_key, 'connectivity')),
'detect_split_by': int(cp.get(_detect_key, 'split_by')),
'detect_threshold': int(cp.get(_detect_key, 'threshold')),
'detect_postprocess': bool(int(cp.get(_detect_key, 'postprocess'))),
'detect_ratio_threshold': int(cp.get(_detect_key, 'ratio_threshold')),
# Detection - Probabilistic
'prob_line_length': int(cp.get(_probabilistic_key, 'line_length')),
......
from csv import DictWriter
from json import dump
def init_writer(path: str, columns: []):
    """Open *path* for writing and return a header-initialized CSV writer.

    Returns a (DictWriter, file_handle) tuple; the caller owns the handle
    and is responsible for closing it.
    """
    handle = open(path, 'w', newline='')
    csv_writer = DictWriter(handle, fieldnames=columns)
    csv_writer.writeheader()
    return csv_writer, handle
def write_results(output_file: str, results: [{}], columns: [str]):
    """Write *results* rows to a CSV file, keeping only the given columns.

    Extra keys in each row dict are dropped so DictWriter never raises on
    unexpected fields.
    """
    with open(output_file, 'w', newline='') as sink:
        csv_writer = DictWriter(sink, fieldnames=columns)
        csv_writer.writeheader()
        for record in results:
            filtered = {k: v for k, v in record.items() if k in columns}
            csv_writer.writerow(filtered)
def write_config(output_file: str, params):
    """Serialize the run-parameter mapping to *output_file* as JSON."""
    with open(output_file, "w") as sink:
        dump(params, sink)
import os


def create_path(path: [str]):
    """Join the given path components into a single filesystem path."""
    return os.path.join(*path)


def check_path(path: [str], create: bool) -> str:
    """Join *path* components and verify the result exists on disk.

    When *create* is True the directory (including parents) is created if
    missing. Returns the joined path when it exists; otherwise prints a
    warning and returns None — callers must handle the None case.
    """
    joined = create_path(path)
    if create:
        os.makedirs(joined, exist_ok=True)
    if os.path.exists(joined):
        return joined
    # Defect in the original: this branch fell off the end of the function
    # and returned None implicitly despite the -> str annotation. The None
    # return is now explicit (behavior unchanged, intent documented).
    print(joined + ' not found')
    return None
[general]
multiprocessing = 1
multiprocessing = 0
limit = 0
[product]
name = MODTBGA
......
"""
CLEANING METHODS
"""
import numpy as np
from skimage.morphology import binary_closing, remove_small_objects, binary_dilation, closing, square
def close_gaps_binary(arr: np.array) -> np.array:
    """Fill small gaps in a binary mask via morphological closing."""
    closed = binary_closing(arr)
    return closed
def filter_small_objects(arr: np.array, max_size: int, is_bool: bool = False) -> np.array:
    # Drop connected components below skimage's size cutoff.
    # NOTE(review): skimage's remove_small_objects takes `min_size` as its
    # second positional argument (objects SMALLER than it are removed), so
    # the local name `max_size` here is misleading — confirm the intended
    # semantics; the config likewise maps a 'min_px_size' setting into a
    # 'max_px_size' key.
    if is_bool:
        # remove_small_objects expects a boolean (or labeled integer) array.
        arr = arr.astype(bool)
    return remove_small_objects(arr, max_size)
def dilate_binary(arr: np.array) -> np.array:
    """Grow the foreground of a binary mask by one morphological dilation."""
    grown = binary_dilation(arr)
    return grown
def threshold_binary(img: np.array, threshold: int) -> np.array:
    """Binarize *img*: True where a pixel exceeds its *threshold*-th percentile."""
    cutoff = np.percentile(img, threshold)
    return np.greater(img, cutoff)
def close_gaps_square(img: np.array, square_size: int) -> np.array:
    """Morphological closing with a square footprint of the given edge length."""
    footprint = square(square_size)
    return closing(img, footprint)
"""
STRAIGHT HOUGH
"""
import clean
import hough
import split
from constants import Columns as col
def run(granule: {}, params: {}):
    """Run blockwise contrail line detection on one granule.

    Granules without flights are returned untouched. Otherwise the CDA image
    is optionally split into square blocks, each block is cleaned (percentile
    threshold, small-object removal, morphological closing) and line-detected
    with a Hough transform, and the block masks are recombined into a single
    contrail mask stored on the granule under col.contrail_mask.
    """
    if granule.get(col.flight_count) <= 0:
        return granule

    img = granule.get(col.cda_i)
    split_by = 0

    # Split into blocks.
    # BUG FIX: the original tested `>= 0`, so a configured split_by of 0
    # entered this branch and crashed with ZeroDivisionError on the
    # block-size computation. Only split for strictly positive values.
    if params.get('split_by') > 0:
        split_by = params.get('split_by')
        block_size = granule.get(col.dim)[0] // split_by
        blocks = split.split_blocks(img, split_by, block_size)
    else:
        blocks = [img]

    # Cleaning
    blocks = [clean.threshold_binary(block, params.get('threshold')) for block in blocks]
    blocks = [clean.filter_small_objects(block, params.get('max_px_size')) for block in blocks]
    blocks = [clean.close_gaps_binary(block) for block in blocks]

    # Detection: probabilistic or straight Hough, per configuration.
    if params.get('method') == 'probabilistic':
        masks = [hough.probabilistic(block, params) for block in blocks]
    else:
        masks = [hough.straight(block, params) for block in blocks]

    # Combine blocks back into a full-size mask.
    mask = split.combine_blocks(masks, split_by)

    # Optional second probabilistic pass to post-process the combined mask.
    if params.get('postprocess'):
        mask = hough.probabilistic(mask, params)

    granule[col.contrail_mask] = mask

    # Release the large intermediates before returning the granule.
    del blocks
    del masks
    del mask
    return granule
import numpy as np
from skimage.feature import canny
from skimage.filters import scharr
from skimage.transform import hough_line, hough_line_peaks, probabilistic_hough_line
from common.constants import Columns as col
from detection.clean import close_gaps_binary, filter_small_objects, threshold_binary
from detection.label import filter_labels, get_line_label_intersections, get_line_type_segments, label_segments, \
line_segment_intersect_prob
from detection.split import split_blocks, combine_blocks
"""
STRAIGHT LINE HOUGH TRANSFORMATION
"""
def straight(img: np.array, params: {}) -> np.array:
    """Detect line-shaped segments in *img* via the straight-line Hough transform.

    Labeled segments intersected by strong Hough peaks are kept, then the
    resulting mask is closed and stripped of small objects.
    """
    angles = np.linspace(-np.pi / 2, np.pi / 2, 360, endpoint=False)
    accumulator, thetas, dists = hough_line(img, theta=angles)
    peaks = hough_line_peaks(accumulator, thetas, dists,
                             threshold=params.get('straight_threshold') * accumulator.max())
    labeled = label_segments(img)
    hit_labels = get_line_label_intersections(labeled, peaks, img.shape)
    keep = get_line_type_segments(labeled, hit_labels, params.get('detect_ratio_threshold'))
    mask = filter_labels(labeled, keep)
    mask = close_gaps_binary(mask)
    mask = filter_small_objects(mask, params.get('detect_max_px_size'), True)
    return mask
"""
PROBABILISTIC HOUGH TRANSFORMATION
"""
def _canny(img: np.array, sigma: int) -> np.array:
    """Edge map from the Canny detector with the given Gaussian sigma."""
    edges = canny(img, sigma)
    return edges
def _scharr(img: np.array) -> np.array:
    """Edge-magnitude map from the Scharr filter."""
    edges = scharr(img)
    return edges
# TODO: add thresholding again
# TODO: add thresholding again
def probabilistic(img, params):
    """Detect line-shaped segments via the probabilistic Hough transform.

    Edges are extracted with Scharr or Canny (per 'prob_filter'), lines are
    found with the probabilistic Hough transform, and labeled segments of
    *img* that intersect those lines are kept in the returned mask.
    """
    if params.get('prob_filter') == 'scharr':
        edges = _scharr(img)
    else:
        edges = _canny(img, params.get('prob_sigma'))
    # BUG FIX: skimage's probabilistic_hough_line signature is
    # (image, threshold=10, line_length=50, line_gap=10). The previous
    # positional call passed prob_line_length into `threshold` and
    # prob_line_gap into `line_length`. Bind by keyword so the configured
    # values land in the intended parameters.
    lines = probabilistic_hough_line(edges,
                                     line_length=params.get('prob_line_length'),
                                     line_gap=params.get('prob_line_gap'))
    labeled = label_segments(img)
    intersections = line_segment_intersect_prob(labeled, lines)
    mask = filter_labels(labeled, intersections)
    return mask
def detect_lines_blockwise(granule: {}, params: {}):
    """Run blockwise contrail line detection on one granule.

    Granules without flights are returned untouched. Otherwise the CDA image
    is optionally split into square blocks, each block is cleaned (percentile
    threshold, small-object removal, morphological closing) and line-detected
    with a Hough transform, and the block masks are recombined into a single
    contrail mask stored on the granule under col.contrail_mask.
    """
    if granule.get(col.flight_count) <= 0:
        return granule

    img = granule.get(col.cda_i)
    split_by = 0

    # Split into blocks.
    # BUG FIX: the original tested `>= 0`, so a configured detect_split_by
    # of 0 entered this branch and crashed with ZeroDivisionError on the
    # block-size computation. Only split for strictly positive values.
    if params.get('detect_split_by') > 0:
        split_by = params.get('detect_split_by')
        block_size = granule.get(col.dim)[0] // split_by
        blocks = split_blocks(img, split_by, block_size)
    else:
        blocks = [img]

    # Cleaning
    blocks = [threshold_binary(block, params.get('detect_threshold')) for block in blocks]
    blocks = [filter_small_objects(block, params.get('detect_max_px_size')) for block in blocks]
    blocks = [close_gaps_binary(block) for block in blocks]

    # Detection: probabilistic or straight Hough, per configuration.
    if params.get('detect_method') == 'probabilistic':
        masks = [probabilistic(block, params) for block in blocks]
    else:
        masks = [straight(block, params) for block in blocks]

    # Combine blocks back into a full-size mask.
    mask = combine_blocks(masks, split_by)

    # Optional second probabilistic pass to post-process the combined mask.
    if params.get('detect_postprocess'):
        mask = probabilistic(mask, params)

    granule[col.contrail_mask] = mask

    # Release the large intermediates before returning the granule.
    del blocks
    del masks
    del mask
    return granule
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment