Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Issue 89 #90

Draft
wants to merge 5 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
23 changes: 16 additions & 7 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM continuumio/miniconda3:4.6.14
FROM continuumio/miniconda3:22.11.1

ENV PYTHONDONTWRITEBYTECODE=true

Expand All @@ -9,7 +9,8 @@ WORKDIR /home/firedpy
RUN conda update conda --yes \
&& conda config --add channels conda-forge \
&& conda config --set channel_priority strict \
&& conda env create -f environment.yaml
&& conda env create -f environment.yaml \
&& echo "conda activate firedpy" >> ~/.bashrc

RUN conda clean --all --yes --force-pkgs-dirs \
&& find /opt/conda/ -follow -type f -name '*.a' -delete \
Expand All @@ -18,15 +19,23 @@ RUN conda clean --all --yes --force-pkgs-dirs \
&& conda list

RUN apt-get update \
&& apt-get install -y --no-install-recommends \
awscli \
htop

&& apt-get install -y htop curl unzip

# Download AWS CLI v2 and install it
RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
&& unzip awscliv2.zip \
&& ./aws/install

# Clean up the downloaded files and temporary packages
RUN rm -rf awscliv2.zip ./aws \
&& apt-get remove -y curl unzip \
&& apt-get clean

# The lines below previously worked around a libffi.so.7 error that no longer occurs;
# leaving them enabled now causes its own problem. If a docker build fails with an
# error involving libffi.so.7, uncomment the following lines.
# RUN ln -s /opt/conda/envs/firedpy/lib/libffi.so.6 /opt/conda/envs/firedpy/lib/libffi.so.7 \
# && pip install ipython

SHELL ["conda", "run", "-n", "firedpy", "/bin/bash", "-c"]

RUN python setup.py install
RUN python setup.py install
40 changes: 20 additions & 20 deletions environment.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,23 +2,23 @@ name: firedpy
channels:
- defaults
dependencies:
- beautifulsoup4
- dask
- descartes
- gdal
- geopandas
- fiona
- lxml
- matplotlib
- netcdf4
- numpy
- pandas
- pycurl
- pyyaml
- rasterio
- requests
- toolz
- tqdm
- xarray
- paramiko
- pytest-shutil
- beautifulsoup4=4.11.1
- dask=2022.7.0
- descartes=1.1.0
- gdal=3.0.2
- geopandas=0.9.0
- fiona=1.8.13.post1
- lxml=4.9.1
- matplotlib=3.5.2
- netcdf4=1.5.7
- numpy=1.23.1
- pandas=1.4.3
- pycurl=7.45.1
- pyyaml=6.0
- rasterio=1.2.10
- requests=2.28.1
- toolz=0.11.2
- tqdm=4.64.0
- xarray=0.20.1
- paramiko=2.8.1
- pytest-shutil=1.7.0
2 changes: 1 addition & 1 deletion firedpy/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -343,7 +343,7 @@ def main():
if landcover_type:
# Earthdata Login
#test url for correct user/password
url = "https://e4ftl01.cr.usgs.gov/MOTA/MCD12Q1.006/2019.01.01/MCD12Q1.A2019001.h13v12.006.2020212130349.hdf"
url = "https://e4ftl01.cr.usgs.gov/MOTA/MCD12Q1.061/2019.01.01/MCD12Q1.A2019001.h13v12.061.2022169161130.hdf"

password_manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
password_manager.add_password(None, "https://urs.earthdata.nasa.gov", username, password)
Expand Down
58 changes: 30 additions & 28 deletions firedpy/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def convertDates(array, year):

def convertDate(julien_day, year):
    """Convert a 1-based Julian day-of-year to days since the Unix epoch.

    Parameters
    ----------
    julien_day : int or str
        Day of year, where 1 means January 1 (MODIS burn-date convention).
    year : int
        Calendar year the day-of-year belongs to.

    Returns
    -------
    int
        Whole days elapsed between 1970-01-01 and the given date.
    """
    base = dt.datetime(1970, 1, 1)
    # Subtract 1 because day-of-year is 1-based: day 1 must map to Jan 1,
    # not Jan 2 (the original off-by-one shifted every date forward a day).
    date = dt.datetime(year, 1, 1) + dt.timedelta(int(julien_day) - 1)
    days = date - base
    return days.days

Expand All @@ -87,7 +87,7 @@ def dateRange(perimeter):
if len(perimeter.coords) > 0:
base = dt.datetime(1970, 1, 1)
days = [p[2] for p in perimeter.coords]
day1 = (base + dt.timedelta(days=int(min(days)))).strftime("%Y-%m-%d")
day1 = (base + dt.timedelta(days=int(min(days)) - 1)).strftime("%Y-%m-%d")
else:
day1 = "N/A"
return day1
Expand Down Expand Up @@ -380,7 +380,7 @@ def getBurns(self):
if self.tiles[0].lower() != "all":
tiles = self.tiles
else:
sftp_client.chdir('/data/MODIS/C6/MCD64A1/HDF')
sftp_client.chdir('/data/MODIS/C61/MCD64A1/HDF')
dirs = sftp_client.listdir()
tiles = dirs

Expand All @@ -389,7 +389,7 @@ def getBurns(self):
# Download the available files and catch failed downloads
for tile in tiles:
# Find remote folder for the tile
sftp_folder = '/data/MODIS/C6/MCD64A1/HDF/' + tile
sftp_folder = '/data/MODIS/C61/MCD64A1/HDF/' + tile

# Check if remote folder exists and if not, continue to next tile
try:
Expand Down Expand Up @@ -418,23 +418,24 @@ def getBurns(self):
for yr in yrs:
tile_range.append("MCD64A1.A"+str(yr))
# Attempt file download
try:
for h in tqdm(hdfs):
remote = sftp_folder+"/"+h
for name in tile_range:
if name in h:
os.chdir(folder)
for h in tqdm(hdfs):
remote = sftp_folder+"/"+h
for name in tile_range:
if name in h:
os.chdir(folder)
try:
sftp_client.get(remote, h)
except Exception as e:
print(e)
except Exception as e:
print(e)
elif self.start_yr==None and self.end_yr==None:
try:
for h in tqdm(hdfs):
remote = sftp_folder+"/"+h
os.chdir(folder)

for h in tqdm(hdfs):
remote = sftp_folder+"/"+h
os.chdir(folder)
try:
sftp_client.get(remote, h)
except Exception as e:
print(e)
except Exception as e:
print(e)

except Exception:
print("No MCD64A1 Product for tile: "+str(tile)+", skipping...")
Expand Down Expand Up @@ -485,20 +486,21 @@ def getBurns(self):

for m in missings:
tile = m.split("/")[-2]
sftp_folder = "/MCD64A1/C6/HDF/" + tile
sftp_folder = "/MCD64A1/C61/HDF/" + tile
sftp_client.chdir(sftp_folder)
file = os.path.basename(m)
localpath = os.path.join(self.hdf_path, tile)
trgt = os.path.join(self.hdf_path, tile, file)

# Attempt re-download
try:
for h in tqdm(hdfs):
remote = sftp_folder+"/"+h
os.chdir(localpath)

for h in tqdm(hdfs):
remote = sftp_folder+"/"+h
os.chdir(localpath)
try:
sftp_client.get(remote, localpath)
except Exception as e:
print(e)
except Exception as e:
print(e)

# Check the downloaded file
try:
Expand Down Expand Up @@ -711,7 +713,7 @@ def rebuild_auth(self, prepared_request, response):
print("Connected to 'fuoco.geog.umd.edu' ...")
# Open the connection to the SFTP
sftp_client = ssh_client.open_sftp()
sftp_client.chdir('/data/MODIS/C6/MCD64A1/HDF')
sftp_client.chdir('/data/MODIS/C61/MCD64A1/HDF')
tiles = sftp_client.listdir()
ssh_client.close()
sftp_client.close()
Expand All @@ -725,7 +727,7 @@ def rebuild_auth(self, prepared_request, response):
lc_type = "type" + str(landcover_type)

# Get available years
r = requestIO("https://e4ftl01.cr.usgs.gov/MOTA/MCD12Q1.006/")
r = requestIO("https://e4ftl01.cr.usgs.gov/MOTA/MCD12Q1.061/")
soup = BeautifulSoup(r, 'html.parser')
links = [link["href"] for link in soup.find_all("a", href=True)]
years = [ll[:4] for ll in links if '01.01' in ll]
Expand Down Expand Up @@ -789,7 +791,7 @@ def fileCheck(landcover_path, year, file):
year_tiles = needed_tiles[yr]

# Retrieve list of links to hdf files
url = ("https://e4ftl01.cr.usgs.gov/MOTA/MCD12Q1.006/" + yr + ".01.01/")
url = ("https://e4ftl01.cr.usgs.gov/MOTA/MCD12Q1.061/" + yr + ".01.01/")
r = requestIO(url)
soup = BeautifulSoup(r, 'html.parser')
names = [link["href"] for link in soup.find_all("a", href=True)]
Expand Down