@lamngockhuong, forked from mosheeshel/ConsumeHeap.java, created May 3, 2019 07:19
ConsumeHeap.java
import java.io.IOException;
import java.util.Vector;

/**
 * Created by moshee on 07/06/17.
 * Compile in place: `javac ConsumeHeap.java`
 * Execute: `java -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/var/log/app-`date +%s`-pid$$.hprof -XX:OnOutOfMemoryError=/opt/app/bin/upload_dump_s3.sh -Xmx2m ConsumeHeap`
 * -XX:+HeapDumpOnOutOfMemoryError automatically creates a heap dump when an OutOfMemoryError occurs.
 * -XX:HeapDumpPath supplies the path for that dump file.
 * -XX:OnOutOfMemoryError specifies a script to run after the dump is created (here, upload_dump_s3.sh).
 */
public class ConsumeHeap {

    public static void main(String[] args) throws IOException {
        // Keep allocating 1 MiB blocks and holding references so the heap eventually overflows.
        Vector<byte[]> v = new Vector<>();
        while (true) {
            byte[] b = new byte[1048576];
            v.add(b);
            Runtime rt = Runtime.getRuntime();
            System.out.println("free memory: " + rt.freeMemory());
        }
    }
}
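For a quick local sanity check, the commands from the header comment can be trimmed down: compile, then run with a tiny heap and a local dump path, leaving out the upload hook. A minimal sketch (the ./consume-heap.hprof path is an arbitrary local choice, not part of the original gist):

javac ConsumeHeap.java
# with -Xmx2m the allocation loop should hit OutOfMemoryError after a few 1 MiB blocks
java -XX:+HeapDumpOnOutOfMemoryError \
     -XX:HeapDumpPath=./consume-heap.hprof \
     -Xmx2m ConsumeHeap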
upload_dump_s3.sh
#!/bin/bash

timestamp()
{
    date +"%Y-%m-%d %T"
}

LOG_FILE="/var/log/s3_upload.log"
exec > >(tee -a "$LOG_FILE")   # stdout goes to the log file and to the console (via tee)
exec 2>&1                      # stderr follows stdout

ec2InstanceId=$(hostname)

export AWS_ACCESS_KEY_ID=$HEAPDUMP_UP_AWS_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY=$HEAPDUMP_UP_AWS_SECRET_ACCESS_KEY
export AWS_DEFAULT_REGION=us-east-1

NOW=$(date +"%Y%m%d%H%M%S")
expirationDate=$(date -d "+30 days" +%Y/%m/%d)

echo "$(timestamp): looking for heap dumps to upload"

cd /var/log/ || exit 1

for hprof_file in *.hprof
do
    [ -e "$hprof_file" ] || continue   # skip the loop body when no .hprof files exist
    echo "$(timestamp): processing $hprof_file ..."
    gzip "$hprof_file"
    aws s3 cp "${hprof_file}.gz" "s3://s3-bucket/${ec2InstanceId}_v${BUILD_NUMBER}_${NOW}.gz" --expires "$expirationDate"
    rm "${hprof_file}.gz"
    echo "$(timestamp): dump uploaded successfully"
done

echo "$(timestamp): done heap dump loop"
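The upload script reads its credentials and build metadata from the environment of the JVM process that triggers it, so those variables have to be exported before the application starts. A rough sketch of that wiring, with placeholder values (the key values and build number are illustrative, not from the gist):

# placeholder credentials and build id, exported before launching the JVM
export HEAPDUMP_UP_AWS_ACCESS_KEY_ID="<access-key-id>"
export HEAPDUMP_UP_AWS_SECRET_ACCESS_KEY="<secret-access-key>"
export BUILD_NUMBER=42
chmod +x /opt/app/bin/upload_dump_s3.sh

# launch as in the ConsumeHeap header comment
java -XX:+HeapDumpOnOutOfMemoryError \
     -XX:HeapDumpPath=/var/log/app-`date +%s`-pid$$.hprof \
     -XX:OnOutOfMemoryError=/opt/app/bin/upload_dump_s3.sh \
     -Xmx2m ConsumeHeap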