Commit ace65c6f authored by Murukesh Mohanan's avatar Murukesh Mohanan

remove crap

parent 017cef6a
K 25
svn:wc:ra_dav:version-url
V 62
/murukeshm/cs699/!svn/ver/3/branches/current/public_html/files
END
mtcd.sh
K 25
svn:wc:ra_dav:version-url
V 70
/murukeshm/cs699/!svn/ver/3/branches/current/public_html/files/mtcd.sh
END
CV-LaTeX.tar.gz
K 25
svn:wc:ra_dav:version-url
V 78
/murukeshm/cs699/!svn/ver/3/branches/current/public_html/files/CV-LaTeX.tar.gz
END
curlwc.sh
K 25
svn:wc:ra_dav:version-url
V 72
/murukeshm/cs699/!svn/ver/3/branches/current/public_html/files/curlwc.sh
END
vim.tar.bz2
K 25
svn:wc:ra_dav:version-url
V 74
/murukeshm/cs699/!svn/ver/3/branches/current/public_html/files/vim.tar.bz2
END
10
dir
20
https://github.com/murukeshm/cs699/branches/current/public_html/files
https://github.com/murukeshm/cs699
2013-08-07T09:28:53.000000Z
3
murukesh.mohanan
ef29f1dd-d6ce-24b3-4afb-ce57b7db7bff
CV-LaTeX.tar.gz
file
2013-08-15T17:09:10.000000Z
6b56665cc9702f77f2028928ac890021
2013-08-07T09:28:53.000000Z
3
murukesh.mohanan
has-props
curlwc.sh
file
2013-08-15T17:09:10.000000Z
f72db6125ee2de89a28b42dd2a59f393
2013-08-07T09:28:53.000000Z
3
murukesh.mohanan
vim.tar.bz2
file
2013-08-15T17:09:10.000000Z
2e3d1eb189eb8a4718064f58495e21f2
2013-08-07T09:28:53.000000Z
3
murukesh.mohanan
has-props
mtcd.sh
file
2013-08-15T17:09:10.000000Z
3b0b613028af322190cd6297bd7a6b1a
2013-08-07T09:28:53.000000Z
3
murukesh.mohanan
K 13
svn:mime-type
V 19
application/x-bzip2
END
#! /bin/bash
# ↄ⃝ Murukesh Mohanan
# This script will use curl (assuming it's somewhere in your $PATH) to download
# a file in pieces of 149 MB, or whatever size you specify (in MB).
# Unfortunately, there's not much support for arguments and switches to the
# script. The currently fixed order is (with default values):
# <target URL> [target directory = $PWD] [piece size in MB = 149]
# The intermediate pieces are stored in a directory named ".tempdir" in the
# target directory, with the part number (starting from 0) appended to their names.
# It so happens that the script will try to resume the download using any ".0"
# part in that directory, since my string manipulation isn't that good. Sorry!
# You may modify it as you please, so long as you tell me what you did, so that
# I can make use of any improvements you made. :) Happy downloading!
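# A hypothetical example (URL, directory and size made up for illustration):
#   ./curlwc.sh http://example.com/big.iso ~/Downloads 100
# would fetch http://example.com/big.iso into ~/Downloads in 100 MB pieces,
# keeping the partial files in ~/Downloads/.tempdir until they are joined.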
function INT_cleanup()
{
kill `jobs -p`
exit
}
trap INT_cleanup INT
status=0
loop_iterations=0
temp_dir=".tempdir"
if [ -z "$1" ]; then
echo "Usage: "
echo "<script name> URL [Target Directory] [piece size in MB]"
echo "Please specify a URL to download, if you still wanna continue."
read download_URL
else
download_URL="$1"
fi
if [ -z "$2" ]; then
working_dir="$PWD"
else
working_dir="$2"
fi
if [ -z "$3" ]; then
piece_size=$((149*1024*1024))
else
piece_size=$(($3*1024*1024))
fi
cd "$working_dir" || exit
file_size=$(curl "$download_URL" -s -I -L | grep "200 OK" -A 10 | grep "Content-Length: " | grep -o '[0-9]*')
# The pipeline above asks curl for the headers only (-I), follows any redirects
# (-L), and pulls the numeric value out of the Content-Length header following
# the final "200 OK" status line.
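# For example, a header line like "Content-Length: 734003200" would leave
# file_size set to 734003200 (that byte count is only illustrative).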
num_parts=$((file_size / piece_size))
part_size[$num_parts]=$((file_size % piece_size))
if [[ ! -d $temp_dir ]]; then
mkdir $temp_dir
fi
cd $temp_dir
for i in `seq 0 $((num_parts - 1))`; do
part_size[$i]=$((piece_size))
done
if ls | grep -q '\.0$'; then
file_name="$(ls *.0)"
file_name=${file_name%".0"}
else
curl --remote-name --silent "$download_URL" --range 0-0 --location
file_name="$(ls)"
part_name="$file_name.0"
mv "$file_name" -T "$part_name"
fi
while [ $status -eq 0 ]; do
loop_iterations=$((loop_iterations + 1))
for i in `seq 0 $num_parts`; do
part_name="`echo $file_name.$i`"
if [ -e "$part_name" ]; then
current_size=$(stat -c%s "$part_name")
if [ $current_size -eq ${part_size[i]} ]; then
echo "Part $i done!"
continue
elif [ $current_size -ge ${part_size[i]} ]; then
echo "Something's wrong with part $i's size. Exiting..."
kill $(jobs -p)
exit
else
echo "Resuming part $i!"
fi
part_begin=$(( i*piece_size + current_size))
else
part_begin=$(( i*piece_size ))
fi
part_end=$((i*piece_size + part_size[i] - 1))
echo "Downloading part $i: From $part_begin till $part_end."
curl "$download_URL" --location --silent --range $part_begin-$part_end >> "$part_name" &
done
wait
echo "Any cURL processes I started have ended. Let me see if the files have been downloaded completely."
status=1
for i in `seq 0 $num_parts`; do
part_name="`echo $file_name.$i`"
current_size=$(stat $part_name -c%s)
if [ $((current_size - part_size[i])) -lt 0 ]; then
echo "In part $i, $(( part_size[i] - current_size )) bytes remain to be downloaded."
status=$((status && 0))
fi
done
if [ $loop_iterations -eq 10 ]; then
echo "Quiting the task. Something might be wrong, as this the tenth time"
echo "I've tried downloading. Do check what's going wrong. Sorry! :("
exit
fi
done
echo "All files done."
cd ..  # back to the target directory (works even when $working_dir was given as a relative path)
if [ -e "$file_name" ]; then
current_size=$(stat -c%s "$file_name")
if [ $((current_size - file_size)) -eq 0 ]; then
echo "A file of matching name and size already exists in the target directory."
fi
else
# Concatenate the parts in numeric order (a plain glob would sort e.g. ".10" before ".2").
for i in `seq 0 $num_parts`; do cat "$temp_dir/$file_name.$i"; done > "$file_name"
downloaded_size=$(stat -c%s "$file_name")
if [ $downloaded_size -eq $file_size ]; then
rm -rf "$temp_dir"
echo "Done!"
else
echo "Oh, damn! Something's wrong. Better check the file size."
fi
fi
\ No newline at end of file
#! /bin/bash
# ↄ⃝ Murukesh Mohanan
# This script will mount its arguments as ISO9660 images, each in a numbered
# subdirectory of ~/cdrom. Given no arguments, it will unmount any mounted ISOs
# and remove their mount points under ~/cdrom.
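# Example (hypothetical image names, assuming ~/cdrom starts out empty):
#   ./mtcd.sh disc1.iso disc2.iso   # mounts them at ~/cdrom/1 and ~/cdrom/2
#   ./mtcd.sh                       # unmounts everything and removes the mount points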
ISODIR=$PWD
CDROOT=~/cdrom
if [ ! -d $CDROOT ]; then
mkdir -p $CDROOT
fi
COUNT=$(/usr/bin/ls -1 "$CDROOT/" | sort -n | tail -1)  # highest existing mount-point number (numeric sort, so 10 > 9)
#cd $CDROOT
for ISO in "$@"; do
let COUNT=$COUNT+1
mkdir "$CDROOT/$COUNT"
sudo mount -t iso9660 "$ISO" "$CDROOT/$COUNT"
if [[ $? -ne 0 ]]; then
rmdir "$CDROOT/$COUNT"
fi
done
if [[ $# -eq 0 ]]; then
cd "$CDROOT"
for i in `/usr/bin/ls`; do
sudo umount "$i"
rmdir "$i"
done
fi
\ No newline at end of file
K 25
svn:wc:ra_dav:version-url
V 63
/murukeshm/cs699/!svn/ver/3/branches/current/public_html/images
END
not_sign.png
K 25
svn:wc:ra_dav:version-url
V 76
/murukeshm/cs699/!svn/ver/3/branches/current/public_html/images/not_sign.png
END
Copyleft.png
K 25
svn:wc:ra_dav:version-url
V 76
/murukeshm/cs699/!svn/ver/3/branches/current/public_html/images/Copyleft.png
END
10
dir
20
https://github.com/murukeshm/cs699/branches/current/public_html/images
https://github.com/murukeshm/cs699
2013-08-07T09:28:53.000000Z
3
murukesh.mohanan
ef29f1dd-d6ce-24b3-4afb-ce57b7db7bff
Copyleft.png
file
2013-08-15T17:09:10.000000Z
e35f376bc827191666f45f3aaa3f9a1a
2013-08-07T09:28:53.000000Z
3
murukesh.mohanan
has-props
not_sign.png
file
2013-08-15T17:09:10.000000Z
5c2260971158aef9c5f2fadbafd3d63c
2013-08-07T09:28:53.000000Z
3
murukesh.mohanan
has-props
K 25
svn:wc:ra_dav:version-url
V 63
/murukeshm/cs699/!svn/ver/3/branches/current/public_html/styles
END
acad.css
K 25
svn:wc:ra_dav:version-url
V 72
/murukeshm/cs699/!svn/ver/3/branches/current/public_html/styles/acad.css
END
contact.css
K 25
svn:wc:ra_dav:version-url
V 75
/murukeshm/cs699/!svn/ver/3/branches/current/public_html/styles/contact.css
END
10
dir
20
https://github.com/murukeshm/cs699/branches/current/public_html/styles
https://github.com/murukeshm/cs699
2013-08-07T09:28:53.000000Z
3
murukesh.mohanan
ef29f1dd-d6ce-24b3-4afb-ce57b7db7bff
acad.css
file
2013-08-15T17:09:10.000000Z
9adf567ab2ce211468c2fb4f25c7885b
2013-08-07T09:28:53.000000Z
3
murukesh.mohanan
contact.css
file
2013-08-15T17:09:10.000000Z
93856f850f5a19ab56361f418c415298
2013-08-07T09:28:53.000000Z
3
murukesh.mohanan
#timetable {
/* borderColor=#000000 width="100%" cellSpacing=0 cellPadding=9 border=1*/
border: 0.1em solid gray;
width: 100%;
margin-left: 0.5em;
}
#timetable td {
border: 0.1em solid gray;
width: 30%;
padding: 1em;
margin: 0;
}
/** The first-child CSS selector is used here to decrease the width
* of the first column.
*/
#timetable td:first-child {
width: 8%;
}
#main li {
list-style-image: url("../images/not_sign.png");
padding: 0.1em;
list-style-position: inside;
}