Dowemo

To compress intermediate (map) output, run:
hive -e "set mapreduce.map.output.compress=true; set mapreduce.map.output.compress.codec=org.apache.hadoop.io.compress.SnappyCodec; <query-string>"

hadoop fs -help
1. hadoop fs -ls '/home/ask/answers/pdate=2014-12-18/'
# Lists the contents of the directory given by the path, showing the name, permissions, owner, size, and modification date of each entry.

2. hadoop fs -lsr '/home/ask/answers/pdate=2014-12-18/'
# Behaves like -ls, but recursively displays entries in all subdirectories of the path.

3. hadoop fs -du '/home/ask/answers/pdate=2014-12-18/'
# Shows disk usage, in bytes, of all files matching the path; file names are reported with the full HDFS protocol prefix.

4. hadoop fs -dus '/home/ask/answers/pdate=2014-12-18/'
# Like -du, but prints a summary of the disk usage of all files/directories in the path.

5. hadoop fs -mv '/home/ask/answers/pdate=2014-12-18/' '/histo'
# Moves the file or directory indicated by src to dest, within HDFS.

6. hadoop fs -cp '/home/ask/answers/pdate=2014-12-18/' '/histo'
# Copies the file or directory identified by src to dest, within HDFS.

7. hadoop fs -rm '/home/ask/answers/pdate=2014-12-18/'
# Removes the file or empty directory identified by the path.

8. hadoop fs -rmr '/home/ask/answers/pdate=2014-12-18/'
# Removes the file or directory identified by the path, recursively deleting any child entries (i.e., files or subdirectories of the path).

9. hadoop fs -put './local' '/histo'
# Copies the file or directory from the local file system identified by localSrc to dest within the DFS.

10. hadoop fs -copyFromLocal './local' '/histo'
# Identical to -put.

11. hadoop fs -moveFromLocal './local' '/histo'
# Copies the file or directory from the local file system identified by localSrc to dest within HDFS, and then deletes the local copy on success.

12. hadoop fs -get '/histo' './local'
# Copies the file or directory in HDFS identified by src to the local file system path identified by localDest.

13. hadoop fs -getmerge '/histo' './local'
# Retrieves all files that match the path src in HDFS and merges them into a single file in the local file system identified by localDest.

14. hadoop fs -cat '/home/ask/answers/pdate=2014-12-18/answers_2014-12-18.txt'
# Displays the contents of filename on standard output.

15. hadoop fs -copyToLocal <src> <localDest>
# Identical to -get.

16. hadoop fs -moveToLocal <src> <localDest>
# Works like -get, but deletes the HDFS copy once the copy to the local file system succeeds.

17. hadoop fs -mkdir <path>
# Creates a directory named path in HDFS, creating any missing parent directories along the way (like mkdir -p in Linux).

18. hadoop fs -setrep [ -R ] [ -w ] rep <path>
# Sets the target replication factor for files identified by path to rep. (The actual replication factor moves toward the target over time.)

19. hadoop fs -touchz <path>
# Creates a file at path containing the current time as a timestamp. Fails if a file already exists at path, unless that file is already size 0.

20. hadoop fs -test -[ezd] <path>
# Returns 1 if the path exists (-e); has zero length (-z); or is a directory (-d); and 0 otherwise.

21. hadoop fs -stat [ format ] <path>
# Prints information about the path. format is a string that accepts file size in blocks (%b), filename (%n), block size (%o), replication (%r), and modification date (%y, %Y).

22. hadoop fs -tail [ -f ] <file2name>
# Displays the last part of the file on standard output.

23. hadoop fs -chmod [ -R ] mode,mode,... <path>...
# Changes the file permissions associated with one or more objects identified by path. Performs changes recursively with -R. mode is a 3-digit octal mode, or {augo}+/-{rwxX}. Assumes "a" if no scope is specified.

24. hadoop fs -chown [ -R ] [owner][:[group]] <path>...
# Sets the owning user and/or group for files or directories identified by path. Sets the owner recursively if -R is specified.

25. hadoop fs -chgrp [ -R ] group <path>...
# Sets the owning group for files or directories identified by path. Sets the group recursively if -R is specified.

26. hadoop fs -help <cmd-name>
# Returns usage information about one of the commands listed above. You must omit the leading '-' character in cmd-name.






Copyright © 2011 Dowemo All rights reserved.    Creative Commons   AboutUs