[WYSIWYG]


Hadoop

Github

iumfs.tar.gz
iumfs-hdfs.tar.gz
http://github.com/kaizawa/iumfs
http://github.com/kaizawa/iumfs-hdfs


Sparc
(32bit)
Sparc
(64bit)
x86/x64
Solaris10-??
OpenSolaris-?
OpenIndiana 148-??
Solaris11 express-??
Solaris11-??


ByteBuffer

  1. Solaris/OpenSolaris()

OpenSolaris

$ cd kaizawa-iumfs-4f1da99
$ ./configure
checking for gcc... no
checking for cc... cc
checking for C compiler default output file name... a.out
checking whether the C compiler works... yes
checking whether we are cross compiling... no
checking for suffix of executables...
checking for suffix of object files... o
checking whether we are using the GNU C compiler... no
checking whether cc accepts -g... yes
checking for cc option to accept ISO C89... none needed
checking for a BSD-compatible install... /usr/bin/ginstall -c
checking for isainfo... yes
configure: creating ./config.status
config.status: creating Makefile
$ make
cd module ; make
gcc -c -DPACKAGE_NAME="IUMFS" -DPACKAGE_TARNAME="iumfs" -DPACKAGE_VERSION="0.1" -DPACKAGE_STRING="IUMFS 0.1.2" 
-DPACKAGE_BUGREPORT="admin2@whiteboard.ne.jp" -g -O2 -Wall -DOPENSOLARIS -DSOL10 -m64 -mcmodel=kernel 
-mno-red-zone -D_KERNEL -I. iumfs_vfsops.c -o iumfs_vfsops.o
gcc -c -DPACKAGE_NAME="IUMFS" -DPACKAGE_TARNAME="iumfs" -DPACKAGE_VERSION="0.1" -DPACKAGE_STRING="IUMFS 0.1.2"
 -DPACKAGE_BUGREPORT="admin2@whiteboard.ne.jp" -g -O2 -Wall -DOPENSOLARIS -DSOL10 -m64 -mcmodel=kernel
 -mno-red-zone -D_KERNEL -I. iumfs_vnops.c -o iumfs_vnops.o
:
# make install
cd module ; make install
/bin/ginstall -c -m 0644 -o root -g sys iumfs /kernel/fs/amd64
/bin/ginstall -c -m 0644 -o root -g sys iumfs.conf /usr/kernel/drv
 :	

$ export HADOOP_HOME=/usr/local/hadoop
$ export JAVA_HOME=/usr/java
$ cd cmd
$ ant
Buildfile: build.xml

compile:
   [javac] Compiling 12 source files to /var/tmp/hdfs/cmd/build

dist:
     [jar] Building jar: /var/tmp/hdfs/cmd/hdfsd.jar

BUILD SUCCESSFUL
Total time: 2 seconds			  						  

# make uninstall


<property>
    <name>dfs.support.append</name>
    <value>true</value>
</property>

 

$ echo aaa >> hoge
-ksh: echo: write to 1 failed [I/O error]

$ ./start-hdfsd.sh
% ${HADOOP_HOME}/bin/hadoop -cp ${CLASSPATH} iumfs.hdfs.Main

$ hadoop -cp "./cmd/hdfsd.jar:./cmd/lib/iumfs-daemon-core-0.2.0.jar:/usr/local/hadoop/conf:\
/usr/local/hadoop/hadoop-core-1.0.2.jar" iumfs.hdfs.Main

java.util.Logger

$ hadoop  -Djava.util.logging.config.file=cmd/log.prop -cp "./cmd/hdfsd.jar:./cmd/lib/iumfs-daemon-core-0.2.0.jar:\
/usr/local/hadoop/conf:/usr/local/hadoop/hadoop-core-1.0.2.jar" iumfs.hdfs.Main
2012/04/30 12:10:50 iumfs.ControlDevicePollingThread run

2012/04/30 12:10:50 iumfs.ControlDevicePollingThread run

2012/04/30 12:10:50 iumfs.ControlDevicePollingThread run

2012/04/30 12:10:50 iumfs.ControlDevicePollingThread run

2012/04/30 12:10:50 iumfs.ControlDevicePollingThread run

2012/04/30 12:10:50 iumfs.ControlDevicePollingThread run

2012/04/30 12:10:50 iumfs.ControlDevicePollingThread run

2012/04/30 12:10:50 iumfs.ControlDevicePollingThread run

:

NameNode

# /usr/sbin/mount -F iumfs hdfs://name_node/base_path mount_point
name_node
NameNode
base_path
mount_point

NameNode

# /usr/sbin/mount -F iumfs hdfs://namenode.example.com/user/myname  /mnt
                                 ^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^  ^^^^
# /usr/sbin/mount
 :
 :
/mnt on hdfs://namenode.example.com/user/myname read/write/setuid/devices/dev=91c0004 on Wed Jun  8 00:41:35 2011

$ cd /mnt/
$ /bin/ls
inputs

$ /bin/ls -l
total 3
drwxr-xr-x   1 root     root           0 May 21 23:03 ./
drwxr-xr-x   1 root     root           0 May 21 23:03 ../
drwxr-xr-x   1 root     root           0 May 21 23:03 inputs/

$ cd inputs
$ cat file1
this is file1
this is file1
this is file1
this is file1
$

$ cp file1 /var/tmp
$
$ cp /etc/hosts /mnt

$ chmod 777 file1
$ chown nobody file1

Usage: umount mount_point

# umount /mnt
#
# pwd
/mnt
# umount /mnt




トップ   新規 一覧 検索 最終更新   ヘルプ   最終更新のRSS